//
// Created by gogoqiu on 2022/3/2.
//

#include <stdio.h>
#include "libavutil/avutil.h"
#include "libavdevice/avdevice.h"
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include <string>
#include <iostream>
#include <libavutil/time.h>

#define AV_CODEC_FLAG_GLOBAL_HEADER (1 << 22)
#define CODEC_FLAG_GLOBAL_HEADER AV_CODEC_FLAG_GLOBAL_HEADER
#define outUrl "rtmp://192.168.5.200/"

using namespace std;

AVFormatContext *ofmt_ctx;   // output (FLV/RTMP) muxer context, built in initialize()
AVStream *video_st;          // video stream added to ofmt_ctx
AVCodecContext *pCodecCtx;   // H.264 encoder context
AVCodec *pCodec;             // H.264 encoder found by avcodec_find_encoder()
AVPacket enc_pkt;            // packet reused for each encoded frame
AVFrame *pFrameYUV;          // frame handed to the encoder (allocated per call)

int64_t start_time;          // av_gettime() timestamp taken after the header is written (us)
// number of frames encoded so far; used to derive each packet's PTS
int frameCount = 0;
int frame_width;             // incoming frame width, set in real_initialize()
int frame_height;            // incoming frame height, set in real_initialize()
int yuv_width;               // frame width used by convert() -- set elsewhere, TODO confirm
int yuv_height;              // frame height used by convert() -- set elsewhere, TODO confirm
int y_length;                // byte length of the Y plane -- set elsewhere, TODO confirm
int uv_length;               // per-plane chroma sample count -- set elsewhere, TODO confirm
char publish_url[255];       // RTMP publish URL copied in real_initialize()

//Output FFmpeg's av_log()
/**
 * av_log() callback: appends every FFmpeg log message to /tmp/av_log.txt.
 *
 * The file is opened and closed on each call so messages survive a crash;
 * `ptr` and `level` are accepted to satisfy the callback signature but are
 * not used (no level filtering is applied).
 */
void custom_log(void *ptr, int level, const char *fmt, va_list vl) {
    (void)ptr;
    (void)level;
    FILE *fp = fopen("/tmp/av_log.txt", "a+");
    if (fp) {
        vfprintf(fp, fmt, vl);
        // fclose() already flushes; the original's extra fflush() was redundant.
        fclose(fp);
    }
}

/**
 * Opens the local V4L2 capture device /dev/video0 at 1280x720 YUYV422.
 *
 * @return demuxer context on success, nullptr on any failure (the error is
 *         printed to stderr). The caller owns the returned context.
 */
static AVFormatContext* open_dev(){
    int ret = 0;
    char errors[1024] = {0, };

    AVFormatContext *fmt_ctx = nullptr;
    AVDictionary *options = nullptr;

    // V4L2 device node to capture from. BUG FIX: a string literal must bind
    // to a pointer-to-const in C++11 and later.
    const char *deviceName = "/dev/video0";

    // Look up the video4linux2 input (device) format.
    // BUG FIX: the original never checked this for nullptr before use.
    AVInputFormat *pInputFormat = av_find_input_format("video4linux2");
    if (pInputFormat == nullptr) {
        fprintf(stderr, "video4linux2 input format not available\n");
        return nullptr;
    }

    // Capture options; check supported modes with `v4l2-ctl -V`.
    av_dict_set(&options, "video_size", "1280x720", 0);
    av_dict_set(&options, "pixel_format", "yuyv422", 0);

    // Open the device: AVInputFormat => AVFormatContext.
    ret = avformat_open_input(&fmt_ctx, deviceName, pInputFormat, &options);
    // BUG FIX: avformat_open_input() leaves unconsumed entries in `options`;
    // the original leaked the dictionary on every path.
    av_dict_free(&options);
    if (ret < 0) {
        av_strerror(ret, errors, sizeof(errors));
        // BUG FIX: message said "audio device" for a video capture device.
        fprintf(stderr, "Failed to open video device, [%d]%s\n", ret, errors);
        return nullptr;
    }
    return fmt_ctx;
}

/*
 * android camera
 */
/*
 * Prepares global state for streaming frames pushed from an Android camera.
 *
 * Records the frame geometry, stores the publish URL, installs the file
 * logger and initializes FFmpeg networking. Always returns 0.
 */
static int real_initialize( int width, int height, const char* url ){
    // Remember the incoming frame geometry for later YUV plane handling.
    frame_height = height;
    frame_width = width;

    // BUG FIX: publish_url is a fixed 255-byte buffer; the original
    // strcpy() could overflow it on a long URL. snprintf() truncates safely
    // and always NUL-terminates.
    snprintf(publish_url, sizeof(publish_url), "%s", url);

    // Route FFmpeg's av_log() output through the file-based logger.
    av_log_set_callback(custom_log);

    av_register_all();
    avformat_network_init();

    return 0;
}

static int real_start( char* frame_data ){
    int ret;
    int enc_got_frame = 0;
    int i = 0;

    pFrameYUV = av_frame_alloc();//旧版 avcodec_alloc_frame() //分配一个AVFrame结构体。
    //av_image_get_buffer_size()
    //
    auto *out_buffer = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width,
                   pCodecCtx->height);
    memcpy(pFrameYUV->data[0], frame_data, y_length);
    //
    for (i = 0; i < uv_length; i++) {
        *(pFrameYUV->data[2] + i) = *(frame_data + y_length + i * 2);
        *(pFrameYUV->data[1] + i) = *(frame_data + y_length + i * 2 + 1);
    }
    pFrameYUV->format = AV_PIX_FMT_YUV420P;
    pFrameYUV->format = AV_PIX_FMT_YUYV422;
    //
    pFrameYUV->width = frame_width;
    pFrameYUV->height = frame_height;

    enc_pkt.data = nullptr;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);//初始化 AVPacker
    ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
    av_frame_free(&pFrameYUV);

    if (enc_got_frame == 1) {
        //LOGI("Succeed to encode frame: %5d\tsize:%5d\n", frameCount, enc_pkt.size);
        frameCount++;
        enc_pkt.stream_index = video_st->index;

        //Write PTS
        AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
        AVRational r_framerate1 = {60, 2};//{ 50, 2 };
        AVRational time_base_q = {1, AV_TIME_BASE};
        //Duration between 2 frames (us)
        int64_t calc_duration = (double) (AV_TIME_BASE) * (1 / av_q2d(r_framerate1));    //内部时间戳
        //Parameters
        //enc_pkt.pts = (double)(frameCount*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pts = av_rescale_q(frameCount * calc_duration, time_base_q, time_base);
        enc_pkt.dts = enc_pkt.pts;
        enc_pkt.duration = av_rescale_q(calc_duration, time_base_q,
                                        time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pos = -1;

        //Delay
        int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
        int64_t now_time = av_gettime() - start_time;
        if (pts_time > now_time)
            av_usleep(pts_time - now_time);
        //
        ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);//将AVPacket（存储视频压缩码流数据）写入文件。
        av_free_packet(&enc_pkt);
    }

    return 0;
}

/*
 * Android entry flow:
 *   initialize( jint width, jint height, jstring jurlpath )
 * Read frames from the device and send them:
 *   device -> input -> AVFormatContext   (avformat_open_input)
 *   output context                       (avformat_alloc_output_context2)
 * then fetch data, parse it, and display it.
 */
static int initialize(){
//FFmpeg av_log() callback
    av_log_set_callback(custom_log);

    av_register_all();
    avformat_network_init();//ws add
    //output initialize
    avformat_alloc_output_context2(&ofmt_ctx, nullptr, "flv", outUrl);
    //output encoder initialize
    pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!pCodec) {
        //LOGE("Can not find encoder!\n");
        return -1;
    }
    //output
    //build the coder option
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (!pCodecCtx) {
        //LOGE("Could not allocate video codec context\n");
        return -1;
    }
    //how to setup code context
    //pCodecCtx->pix_fmt =  AV_PIX_FMT_YUV420P;//PIX_FMT_YUV420P新版加
    pCodecCtx->pix_fmt =  AV_PIX_FMT_YUV422P;//PIX_FMT_YUV420P新版加
    //width: 640
    pCodecCtx->width = 640;
    pCodecCtx->height = 480;
    pCodecCtx->time_base.num = 1;
    pCodecCtx->time_base.den = 25;
    pCodecCtx->bit_rate = 400000;
    pCodecCtx->gop_size = 250;
    /* Some formats want stream headers to be separate. */
    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;

    //H264 codec param
    //pCodecCtx->me_range = 16;
    //pCodecCtx->max_qdiff = 4;
    //pCodecCtx->qcompress = 0.6;
    pCodecCtx->qmin = 10;
    pCodecCtx->qmax = 51;
    //Optional Param
    pCodecCtx->max_b_frames = 0;
    // Set H264 preset and tune
    AVDictionary *param = nullptr;
    //av_dict_set(&param, "preset", "ultrafast", 0);
    //av_dict_set(&param, "tune", "zerolatency", 0);
    av_opt_set(pCodecCtx->priv_data, "preset", "ultrafast", 0);
    av_opt_set(pCodecCtx->priv_data, "tune", "zerolatency", 0);
    //
    if (avcodec_open2(pCodecCtx, pCodec, &param) < 0) {
        //LOGE("Failed to open encoder!\n");
        return -1;
    }

    //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
    //~~
    video_st = avformat_new_stream(ofmt_ctx, pCodec);//avformat_new_stream创建流通道
    if (video_st == nullptr) {
        return -1;
    }
    video_st->time_base.num = 1;
    video_st->time_base.den = 30;
    video_st->codec = pCodecCtx;

    //Open output URL,set before avformat_write_header() for muxing
    if (avio_open(&ofmt_ctx->pb, outUrl, AVIO_FLAG_READ_WRITE) < 0) {
        //LOGE("Failed to open output file!\n");
        return -1;
    }

    //Write File Header
    avformat_write_header(ofmt_ctx, nullptr);

    start_time = av_gettime();
    return 0;
}

/*
 * Prepares a YUV420P frame sized for the encoder. The actual pixel-filling
 * code (copy Y, de-interleave VU) is still commented out, so this is an
 * unfinished stub: it allocates and labels the frame but converts nothing.
 *
 * @return always 0.
 */
int convert(){
    AVFrame *srcFrameYUV = av_frame_alloc(); // replaces legacy avcodec_alloc_frame()

    // Allocate a YUV420P pixel buffer and point the frame's planes into it.
    auto *out_buffer = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *) srcFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width,
                   pCodecCtx->height);
    // TODO: fill the planes from the source buffer (copy Y plane, then
    // de-interleave the VU samples), as sketched in the original comments.
    srcFrameYUV->format = AV_PIX_FMT_YUV420P;
    srcFrameYUV->width = yuv_width;
    srcFrameYUV->height = yuv_height;

    // NOTE(review): srcFrameYUV and out_buffer are never freed or handed to
    // a caller -- they leak; revisit when the conversion is implemented.

    // BUG FIX: the function is declared int but had no return statement,
    // which is undefined behavior if the caller reads the result.
    return 0;
}

int start(){
    int ret;
    int enc_got_frame = 0;
    int i = 0;
    //
    pFrameYUV = av_frame_alloc();//旧版 avcodec_alloc_frame() //分配一个AVFrame结构体。
    //av_image_get_buffer_size()
    //
    auto *out_buffer = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width,
                   pCodecCtx->height);

    //安卓摄像头数据为NV21格式，此处将其转换为YUV420P格式
    //??
    //jbyte *in = (jbyte *) env->GetByteArrayElements(yuv, 0);
    //
    /*
    memcpy(pFrameYUV->data[0], in, y_length);
    for (i = 0; i < uv_length; i++) {
        *(pFrameYUV->data[2] + i) = *(in + y_length + i * 2);
        *(pFrameYUV->data[1] + i) = *(in + y_length + i * 2 + 1);
    }*/

    //pFrameYUV->format = AV_PIX_FMT_YUV420P;
    pFrameYUV->format = AV_PIX_FMT_YUYV422;
    pFrameYUV->width = 1280;
    pFrameYUV->height = 720;

    enc_pkt.data = nullptr;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);//初始化 AVPacker
    ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
    av_frame_free(&pFrameYUV);

    if (enc_got_frame == 1) {
        //LOGI("Succeed to encode frame: %5d\tsize:%5d\n", frameCount, enc_pkt.size);
        frameCount++;
        enc_pkt.stream_index = video_st->index;

        //Write PTS
        AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
        AVRational r_framerate1 = {60, 2};//{ 50, 2 };
        AVRational time_base_q = {1, AV_TIME_BASE};
        //Duration between 2 frames (us)
        int64_t calc_duration = (double) (AV_TIME_BASE) * (1 / av_q2d(r_framerate1));    //内部时间戳
        //Parameters
        //enc_pkt.pts = (double)(frameCount*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pts = av_rescale_q(frameCount * calc_duration, time_base_q, time_base);
        enc_pkt.dts = enc_pkt.pts;
        enc_pkt.duration = av_rescale_q(calc_duration, time_base_q,
                                        time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pos = -1;

        //Delay
        int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
        int64_t now_time = av_gettime() - start_time;
        if (pts_time > now_time)
            av_usleep(pts_time - now_time);
        //
        ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);//将AVPacket（存储视频压缩码流数据）写入文件。
        av_free_packet(&enc_pkt);
    }

    return 0;
}

/**
 * Stops the stream. Currently a stub: no teardown (trailer write, avio
 * close, context free) is implemented yet -- TODO.
 *
 * @return 0 always.
 */
int stop(){
    // BUG FIX: the function is declared int but had no return statement,
    // which is undefined behavior if the caller reads the result.
    return 0;
}

// Entry point. Currently a stub that performs no work; the streaming
// pipeline (initialize/start/stop) is not wired up yet.
int main(int argc, char *argv[]){
    (void)argc;
    (void)argv;
    return 0;
}