use std::ffi::CString;

use ffmpeg_sys::{
    self, av_dict_free, av_dict_set, av_dump_format, av_gettime, av_interleaved_write_frame,
    av_packet_unref, av_q2d, av_read_frame, av_register_all, av_rescale_q, av_rescale_q_rnd,
    av_usleep, av_write_trailer, avcodec_parameters_copy, avformat_alloc_output_context2,
    avformat_close_input, avformat_find_stream_info, avformat_free_context,
    avformat_network_init, avformat_new_stream, avformat_open_input, avformat_write_header,
    avio_close, avio_open, AVCodecParameters, AVDictionary, AVFormatContext, AVMediaType,
    AVOutputFormat, AVPacket, AVRational, AVRounding, AVStream, AVFMT_GLOBALHEADER, AVFMT_NOFILE,
    AVIO_FLAG_WRITE, AV_CODEC_FLAG_GLOBAL_HEADER, AV_NOPTS_VALUE, AV_TIME_BASE,
};

/// Error codes identifying which ffmpeg call failed inside `remux`.
///
/// The enum is `#[repr(i32)]` because `main` converts the variant to its
/// discriminant and uses it as the process exit status, so the numeric
/// values are part of the external contract — do not renumber.
#[repr(i32)]
#[derive(Debug)]
enum ErrCode {
    AVFormatOpenInput = -1,         // avformat_open_input failed
    AVFormatFindStreamInfo = -2,    // avformat_find_stream_info failed
    AVFormatAllocOutputContext2 = -3, // output context allocation failed
    AVFormatNewStream = -4,         // avformat_new_stream returned null
    AVCodecParametersCopy = -5,     // avcodec_parameters_copy failed
    AVIOOpen = -6,                  // avio_open on the output URL failed
    AVFormatWriteHeader = -7,       // avformat_write_header failed
}

unsafe fn close(ifmt_ctx: *mut *mut AVFormatContext, ofmt_ctx: *mut AVFormatContext) {
    avformat_close_input(ifmt_ctx);
    if !ofmt_ctx.is_null() {
        avio_close((*ofmt_ctx).pb);
        avformat_free_context(ofmt_ctx);
    }
}

/// Remux `input_path` (file or network stream, e.g. RTSP) into an FLV
/// container at `output_path`, pacing video packets against the wall
/// clock so the stream is pushed in real time.
///
/// Returns `Err` with the `ErrCode` naming the first ffmpeg call that
/// failed.
///
/// # Safety
/// Performs raw ffmpeg FFI calls. Both paths must be URLs/paths the
/// linked ffmpeg build understands.
unsafe fn remux(input_path: &str, output_path: &str) -> std::result::Result<(), ErrCode> {
    // Index of the first video stream; stays -1 if the input has none.
    let mut video_index: isize = -1;
    let mut frame_index: i64 = 0;

    let input_path_cstr = CString::new(input_path).expect("to c str");
    let output_path_cstr = CString::new(output_path).expect("to c str");

    av_register_all();
    avformat_network_init();

    let mut ifmt_ctx: *mut AVFormatContext = std::ptr::null_mut();
    let mut ofmt_ctx: *mut AVFormatContext = std::ptr::null_mut();

    // Options shared by the demuxer and the FLV muxer:
    //   flvflags=no_duration_filesize — don't seek back to patch
    //     duration/filesize metadata (required for live output).
    //   stimeout — RTSP socket read timeout in microseconds (2 s).
    //   rtsp_transport=tcp — RTSP over TCP instead of UDP.
    let mut opts: *mut AVDictionary = std::ptr::null_mut();
    let flvflags_key = CString::new("flvflags").expect("to c str");
    let flvflags_value = CString::new("no_duration_filesize").expect("to c str");
    av_dict_set(&mut opts, flvflags_key.as_ptr(), flvflags_value.as_ptr(), 0);

    let stimeout_key = CString::new("stimeout").expect("to c str");
    let stimeout_value = CString::new((2 * 1000000).to_string()).expect("to c str");
    av_dict_set(&mut opts, stimeout_key.as_ptr(), stimeout_value.as_ptr(), 0);

    let rtsp_transport_key = CString::new("rtsp_transport").expect("to c str");
    let rtsp_transport_value = CString::new("tcp").expect("to c str");
    av_dict_set(&mut opts, rtsp_transport_key.as_ptr(), rtsp_transport_value.as_ptr(), 0);

    // Open the input.
    // BUGFIX: every error path below now passes `ofmt_ctx` (not `ifmt_ctx`)
    // as the output context — the original handed the input context to
    // `close` twice, freeing it again after avformat_close_input.
    if avformat_open_input(
        &mut ifmt_ctx,
        input_path_cstr.as_ptr(),
        std::ptr::null_mut(),
        &mut opts,
    ) < 0
    {
        av_dict_free(&mut opts);
        close(&mut ifmt_ctx, ofmt_ctx);
        return Err(ErrCode::AVFormatOpenInput);
    }

    // Probe stream info (raw h264/flv carries no header). >= 0 on success.
    if avformat_find_stream_info(ifmt_ctx, std::ptr::null_mut()) < 0 {
        av_dict_free(&mut opts);
        close(&mut ifmt_ctx, ofmt_ctx);
        return Err(ErrCode::AVFormatFindStreamInfo);
    }

    // Dump input stream info (diagnostic output).
    av_dump_format(ifmt_ctx, 0, input_path_cstr.as_ptr(), 0);

    let format_name_cstr = CString::new("flv").expect("to c str");

    // Allocate the FLV output context.
    avformat_alloc_output_context2(
        &mut ofmt_ctx,
        std::ptr::null_mut(),
        format_name_cstr.as_ptr(),
        output_path_cstr.as_ptr(),
    );

    if ofmt_ctx.is_null() {
        av_dict_free(&mut opts);
        close(&mut ifmt_ctx, ofmt_ctx);
        return Err(ErrCode::AVFormatAllocOutputContext2);
    }

    let input_streams = {
        let len = (*ifmt_ctx).nb_streams as usize;
        // `streams` is an array of `len` *pointers* to AVStream; turn it
        // into safe references (len is the element count, not bytes).
        std::slice::from_raw_parts((*ifmt_ctx).streams, len)
            .iter()
            .map(|x| (*x).as_ref().expect("not null"))
            .collect::<Vec<&AVStream>>()
    };

    for (index, in_stream) in input_streams.iter().enumerate() {
        let in_codecpar: *mut AVCodecParameters = in_stream.codecpar;
        let codec_type = (*in_codecpar).codec_type;

        // Only audio/video/subtitle streams are copied to the output.
        if codec_type != AVMediaType::AVMEDIA_TYPE_AUDIO
            && codec_type != AVMediaType::AVMEDIA_TYPE_VIDEO
            && codec_type != AVMediaType::AVMEDIA_TYPE_SUBTITLE
        {
            continue;
        }

        // BUGFIX: remember the FIRST video stream's index. The original
        // did `video_index += index`, which computes `index - 1` because
        // video_index starts at -1.
        if codec_type == AVMediaType::AVMEDIA_TYPE_VIDEO && video_index < 0 {
            video_index = index as isize;
        }

        // Create the matching output stream.
        let out_stream = avformat_new_stream(ofmt_ctx, (*in_stream.codec).codec);
        if out_stream.is_null() {
            av_dict_free(&mut opts);
            close(&mut ifmt_ctx, ofmt_ctx);
            return Err(ErrCode::AVFormatNewStream);
        }
        // Copy the codec parameters (stream copy, no transcoding).
        if avcodec_parameters_copy((*out_stream).codecpar, in_stream.codecpar) < 0 {
            av_dict_free(&mut opts);
            close(&mut ifmt_ctx, ofmt_ctx);
            return Err(ErrCode::AVCodecParametersCopy);
        }
        // Let the FLV muxer pick its own codec tag; the tag copied from
        // the input container may be invalid for FLV. Clear it on both the
        // (deprecated) codec context and codecpar — newer muxers read the
        // latter.
        (*(*out_stream).codec).codec_tag = 0;
        (*(*out_stream).codecpar).codec_tag = 0;

        if ((*(*ofmt_ctx).oformat).flags as i32 & AVFMT_GLOBALHEADER as i32) != 0 {
            (*(*out_stream).codec).flags |= AV_CODEC_FLAG_GLOBAL_HEADER as i32;
        }
    }

    av_dump_format(ofmt_ctx, 0, output_path_cstr.as_ptr(), 1);

    let ofmt: *mut AVOutputFormat = (*ofmt_ctx).oformat;
    // Open the output IO unless the muxer manages its own (AVFMT_NOFILE).
    if (*ofmt).flags as i32 & AVFMT_NOFILE as i32 == 0 {
        if avio_open(
            &mut (*ofmt_ctx).pb,
            output_path_cstr.as_ptr(),
            AVIO_FLAG_WRITE,
        ) < 0
        {
            av_dict_free(&mut opts);
            close(&mut ifmt_ctx, ofmt_ctx);
            return Err(ErrCode::AVIOOpen);
        }
    }

    // Write the container header; the options dict is not needed after
    // this, so free it on both outcomes (the original leaked it).
    let header_ret = avformat_write_header(ofmt_ctx, &mut opts);
    av_dict_free(&mut opts);
    if header_ret < 0 {
        close(&mut ifmt_ctx, ofmt_ctx);
        return Err(ErrCode::AVFormatWriteHeader);
    }

    let mut pkt: AVPacket = std::mem::zeroed();
    let start_time = av_gettime();
    // BUGFIX: start below any real timestamp. Initializing these to 0 made
    // the `>=` monotonicity check below drop the very first packet (pts 0).
    let mut prev_pts: i64 = i64::MIN;
    let mut prev_dts: i64 = i64::MIN;

    // Read, retime, pace and write every packet.
    loop {
        if av_read_frame(ifmt_ctx, &mut pkt) < 0 {
            break;
        }

        // Packet has no timestamp (e.g. raw, undecoded H.264): synthesize
        // one from the video frame rate.
        if pkt.pts == AV_NOPTS_VALUE && video_index >= 0 {
            // BUGFIX: `streams` is an array of pointers; the original did
            // `(*(*ifmt_ctx).streams).offset(i)`, which offsets the first
            // AVStream struct itself and reads garbage for any index > 0.
            let in_stream = *(*ifmt_ctx).streams.offset(video_index);
            let time_base = (*in_stream).time_base;

            // One frame's duration in AV_TIME_BASE (microsecond) units;
            // r_frame_rate is the stream's base frame rate, av_q2d turns
            // the rational into f64.
            let calc_duration = AV_TIME_BASE as f64 / av_q2d((*in_stream).r_frame_rate);

            // Convert AV_TIME_BASE units into the stream's time base.
            // BUGFIX: the original *multiplied* by AV_TIME_BASE instead of
            // dividing, inflating the timestamps by AV_TIME_BASE^2.
            let scale = av_q2d(time_base) * AV_TIME_BASE as f64;
            pkt.pts = (frame_index as f64 * calc_duration / scale) as i64;
            pkt.dts = pkt.pts;
            pkt.duration = (calc_duration / scale) as i64;
        }

        // Pace video packets against the wall clock so the stream is
        // pushed in real time, not as fast as the input can be read.
        if video_index >= 0 && pkt.stream_index as isize == video_index {
            let in_stream = *(*ifmt_ctx).streams.offset(video_index);
            let time_base = (*in_stream).time_base;
            let time_base_q = AVRational {
                num: 1,
                den: AV_TIME_BASE,
            };

            // Packet time in microseconds…
            let pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
            // …versus microseconds elapsed since we started writing.
            let now_time = av_gettime() - start_time;
            if pts_time > now_time {
                av_usleep((pts_time - now_time) as u32);
            }
        }

        // BUGFIX: index the pointer array, as above.
        let in_stream: *mut AVStream = *(*ifmt_ctx).streams.offset(pkt.stream_index as isize);
        let out_stream: *mut AVStream = *(*ofmt_ctx).streams.offset(pkt.stream_index as isize);

        // Rescale pts/dts/duration from the input to the output time base.
        let itime: AVRational = (*in_stream).time_base;
        let otime: AVRational = (*out_stream).time_base;
        pkt.pts = av_rescale_q_rnd(pkt.pts, itime, otime, AVRounding::AV_ROUND_NEAR_INF);
        pkt.dts = av_rescale_q_rnd(pkt.dts, itime, otime, AVRounding::AV_ROUND_NEAR_INF);
        pkt.duration = av_rescale_q(pkt.duration, itime, otime);
        pkt.pos = -1;

        if pkt.stream_index as isize == video_index {
            frame_index += 1;
        }

        // Drop packets whose timestamps went backwards — FLV requires
        // strictly increasing dts.
        if prev_pts >= pkt.pts || prev_dts >= pkt.dts {
            // BUGFIX: release the packet's buffer; the original leaked it
            // on every skipped packet.
            av_packet_unref(&mut pkt);
            continue;
        }
        prev_pts = pkt.pts;
        prev_dts = pkt.dts;

        // Write the packet, interleaving across streams.
        let ret = av_interleaved_write_frame(ofmt_ctx, &mut pkt);
        if ret < 0 {
            break;
        }
        av_packet_unref(&mut pkt);
    }
    av_write_trailer(ofmt_ctx);
    // BUGFIX: release both contexts (and the open output IO); the original
    // leaked all of them on the success path.
    close(&mut ifmt_ctx, ofmt_ctx);
    Ok(())
}

/// Entry point: `remux <input> <output>`.
///
/// Exits with the `ErrCode` discriminant (as the process status) when
/// remuxing fails; panics if either argument is missing.
fn main() -> std::result::Result<(), i32> {
    let mut args = std::env::args().skip(1);
    let input_path = args.next().unwrap();
    let output_path = args.next().unwrap();
    // SAFETY: remux only performs ffmpeg FFI calls on the given paths.
    unsafe { remux(&input_path, &output_path).map_err(|e| e as i32) }
}
