//https://feater.top/ffmpeg/ffmpeg-capature-camera-data-and-encode-yuv-to-h264-under-linux/
//https://feater.top/ffmpeg/ffmpeg-encode-h264-to-mp4/
use crate::{CameraPacket, CustomError, DecoderData};
use ffmpeg_sys_next::{
    av_dump_format, av_find_input_format, av_frame_alloc, av_frame_free, av_image_fill_arrays,
    av_image_get_buffer_size, av_interleaved_write_frame, av_malloc, av_packet_alloc,
    av_packet_free, av_packet_unref, av_q2d, av_read_frame, av_rescale_q, av_rescale_q_rnd,
    av_write_trailer, avcodec_alloc_context3, avcodec_find_decoder, avcodec_find_encoder,
    avcodec_open2, avcodec_parameters_to_context, avcodec_receive_frame, avcodec_receive_packet,
    avcodec_send_frame, avcodec_send_packet, avformat_alloc_context,
    avformat_alloc_output_context2, avformat_free_context, avformat_new_stream,
    avformat_open_input, avformat_write_header, avio_open, sws_getContext, sws_scale, AVCodec,
    AVCodecContext, AVCodecID, AVCodecParameters, AVFormatContext, AVInputFormat, AVMediaType,
    AVOutputFormat, AVPacket, AVPixelFormat, AVStream, SwsContext, AVERROR, AVERROR_EOF,
    AVIO_FLAG_READ_WRITE, AV_NOPTS_VALUE, AV_TIME_BASE, EAGAIN, SWS_BICUBIC, SWS_BILINEAR,
};
use sdl2::{
    event::Event, keyboard::Keycode, pixels::PixelFormatEnum, rect::Rect, Sdl, VideoSubsystem,
};
use std::{
    ffi::{c_char, CString},
    os::raw::c_int,
    ptr::null_mut,
    sync::{
        atomic::{AtomicBool, Ordering},
        Arc,
    },
};
use tokio::sync::{
    broadcast,
    mpsc::{self, Receiver, Sender},
    Mutex,
};

/// Pipeline state for capturing camera frames with FFmpeg, previewing them
/// through SDL2 and re-encoding them into an output file.
///
/// Most fields are raw FFmpeg pointers; they start out null and are filled in
/// by the step-by-step setup methods (`find_input_context`, `open_input`,
/// `find_decoder`, ...) in the order those methods are meant to be called.
#[derive(Clone)]
pub struct VideoPlayer {
    // Frame dimensions, taken from the input codec parameters
    // by `in_parameters_to_context`.
    pub width: i32,
    pub height: i32,
    // Demuxer short name (e.g. a device backend), capture URL/device path and
    // destination file path, pre-converted to C strings for the FFI calls.
    pub short_name: CString,
    pub url: CString,
    pub out_file: CString,
    // Input (capture) side: demuxer and the input/output format contexts.
    pub input_format: *const AVInputFormat,
    pub in_format_context: *mut AVFormatContext,
    pub out_format_context: *mut AVFormatContext,
    // Index of the video stream inside the input; -1 until found.
    pub video_index: isize,
    // Pointer *into* the demuxer's stream array (a **AVStream, not *AVStream).
    pub in_av_stream: *mut *mut AVStream,
    pub in_codec_parameters: *mut AVCodecParameters,
    pub in_av_codec: *const AVCodec,
    pub in_codec_context: *mut AVCodecContext,
    // Scalers: decoded frame -> RGB24 for the SDL preview,
    // decoded frame -> YUV420P for the file encoder.
    pub play_sws_context: *mut SwsContext,
    pub file_sws_context: *mut SwsContext,
    // Set by `init_sdl`; used by `player` to create the preview window.
    pub video_subsystem: Option<VideoSubsystem>,
    // Channel carrying decoded RGB frames from `play_codec` to `player`.
    pub sender: Sender<DecoderData>,
    pub recevier: Arc<Mutex<Receiver<DecoderData>>>,
    // Output (encode/mux) side objects.
    pub output_format: *const AVOutputFormat,
    pub out_vstream: *mut AVStream,
    pub out_codec: *const AVCodec,
    pub out_codec_parameters: *mut AVCodecParameters,
    pub out_codec_context: *mut AVCodecContext,
    // Cleared externally to request shutdown of the capture loop.
    pub running: Arc<AtomicBool>,
    // Set by `read_packet` when capture stops; consumer tasks poll it to exit.
    pub end: Arc<Mutex<bool>>,
}

// SAFETY: NOTE(review) — `VideoPlayer` holds raw FFmpeg pointers, which are
// not `Send`/`Sync` by default. These impls assert cross-thread use is safe;
// that only holds if the FFmpeg objects are never mutated concurrently from
// multiple tasks. The spawn-based methods clone `self` and share the same
// pointers across tasks — confirm the decoder/encoder contexts are never
// touched from two tasks at once.
unsafe impl Send for VideoPlayer {}
unsafe impl Sync for VideoPlayer {}

impl VideoPlayer {
    pub fn new(
        short_name: String,
        url: String,
        out_file: String,
        running: Arc<AtomicBool>,
    ) -> Self {
        let (sender, recevier): (Sender<DecoderData>, Receiver<DecoderData>) =
            mpsc::channel::<DecoderData>(1024);
        VideoPlayer {
            width: 0,
            height: 0,
            short_name: CString::new(short_name).unwrap(),
            url: CString::new(url).unwrap(),
            out_file: CString::new(out_file).unwrap(),
            input_format: null_mut(),
            in_format_context: null_mut(),
            out_format_context: null_mut(),
            video_index: -1,
            in_av_stream: null_mut(),
            in_codec_parameters: null_mut(),
            in_av_codec: null_mut(),
            in_codec_context: null_mut(),
            play_sws_context: null_mut(),
            file_sws_context: null_mut(),
            video_subsystem: None,
            sender,
            recevier: Arc::new(Mutex::new(recevier)),
            output_format: null_mut(),
            out_vstream: null_mut(),
            out_codec: null_mut(),
            out_codec_parameters: null_mut(),
            out_codec_context: null_mut(),
            running,
            end: Arc::new(Mutex::new(false)),
        }
    }

    /// Look up the input demuxer named by `short_name` and remember it for
    /// `open_input`. A null result is tolerated (FFmpeg then auto-detects).
    pub async fn find_input_context(&mut self) -> Result<(), CustomError> {
        let fmt = unsafe { av_find_input_format(self.short_name.as_ptr()) };
        self.input_format = fmt;
        Ok(())
    }

    /// Allocate the (still unconfigured) input and output format contexts.
    pub async fn alloc_format_context(&mut self) -> Result<(), CustomError> {
        unsafe {
            self.in_format_context = avformat_alloc_context();
            self.out_format_context = avformat_alloc_context();
        }
        Ok(())
    }

    /// Open the capture device/URL with the demuxer found earlier.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when `avformat_open_input` fails.
    pub async fn open_input(&mut self) -> Result<(), CustomError> {
        let ret = unsafe {
            avformat_open_input(
                &mut self.in_format_context,
                self.url.as_ptr(),
                self.input_format,
                null_mut(),
            )
        };
        if ret >= 0 {
            Ok(())
        } else {
            Err(CustomError::FfmepgError("cannot open input ".to_owned()))
        }
    }

    /// Print a human-readable summary of the opened input to stderr
    /// (the final 0 selects the input side of `av_dump_format`).
    pub async fn dump_format(&self) -> Result<(), CustomError> {
        let url = self.url.as_ptr();
        unsafe { av_dump_format(self.in_format_context, 0, url, 0) };
        Ok(())
    }

    /// Scan the demuxer's streams for a video stream and record its index,
    /// its slot in the stream array, and its codec parameters.
    ///
    /// NOTE(review): the loop does not `break` on a match, so if the input has
    /// several video streams the *last* one wins — confirm that is intended.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when no video stream is present.
    pub async fn stream_index(&mut self) -> Result<(), CustomError> {
        for index in 0..unsafe { (*self.in_format_context).nb_streams } as isize {
            // `streams` is an array of *mut AVStream; dereference twice to
            // reach this stream's codec parameters.
            if unsafe {
                (*(*(*(*self.in_format_context).streams).offset(index)).codecpar).codec_type
            } == AVMediaType::AVMEDIA_TYPE_VIDEO
            {
                self.video_index = index;
                // Store a pointer *into* the streams array (**AVStream),
                // not the AVStream itself.
                self.in_av_stream = unsafe { (*self.in_format_context).streams.offset(index) };
                self.in_codec_parameters = unsafe { (**self.in_av_stream).codecpar };
            }
        }
        if -1 == self.video_index {
            println!("Cannot find video stream in file");
            return Err(CustomError::FfmepgError(
                "Cannot find video stream in file".to_string(),
            ));
        }
        Ok(())
    }

    /// Find a decoder matching the input stream's codec id.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when libavcodec has no decoder registered
    /// for the stream's codec.
    pub async fn find_decoder(&mut self) -> Result<(), CustomError> {
        self.in_av_codec = unsafe { avcodec_find_decoder((*self.in_codec_parameters).codec_id) };
        if self.in_av_codec.is_null() {
            println!("Cannot find valid video decoder");
            // Fixed typo ("vallid") so the returned error matches the log line.
            return Err(CustomError::FfmepgError(
                "Cannot find valid video decoder".to_string(),
            ));
        }
        Ok(())
    }

    /// Allocate a decoder context for the input codec.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when allocation fails.
    pub async fn in_codec_alloc_context(&mut self) -> Result<(), CustomError> {
        let ctx = unsafe { avcodec_alloc_context3(self.in_av_codec) };
        self.in_codec_context = ctx;
        if ctx.is_null() {
            println!("Cannot alloc valid decode codec context");
            return Err(CustomError::FfmepgError(
                "Cannot alloc valid decode codec context".to_owned(),
            ));
        }
        Ok(())
    }

    /// Copy the input stream's codec parameters into the decoder context and
    /// cache the frame dimensions on `self`.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when the parameter copy fails.
    pub async fn in_parameters_to_context(&mut self) -> Result<(), CustomError> {
        if unsafe {
            avcodec_parameters_to_context(self.in_codec_context, self.in_codec_parameters)
        } < 0
        {
            return Err(CustomError::FfmepgError(
                "avcodec parameters to context error".to_owned(),
            ));
        }
        unsafe {
            self.width = (*self.in_codec_context).width;
            self.height = (*self.in_codec_context).height;
        }
        Ok(())
    }

    pub async fn open_avcodec(&self) -> Result<(), CustomError> {
        let res = unsafe { avcodec_open2(self.in_codec_context, self.in_av_codec, null_mut()) };
        if res < 0 {
            return Err(CustomError::FfmepgError("open avcodec error".to_owned()));
        }
        Ok(())
    }

    pub async fn read_packet(
        &mut self,
        in_packet_sender: broadcast::Sender<CameraPacket>,
    ) -> Result<(), CustomError> {
        loop {
            let pkt: *mut AVPacket = unsafe { av_packet_alloc() };

            if unsafe { av_read_frame(self.in_format_context, pkt) } < 0 {
                unsafe { av_packet_unref(pkt) };
                return Err(CustomError::FfmepgError("Read frame failed".to_owned()));
            }

            if unsafe { (*pkt).stream_index } != self.video_index as i32 {
                unsafe { av_packet_unref(pkt) };
                continue;
            }

            let mut camera_packet = CameraPacket::default();
            unsafe {
                camera_packet.copy(pkt);
            };
            unsafe { av_packet_unref(pkt) };

            in_packet_sender.send(camera_packet).unwrap();
            if !self.running.load(Ordering::SeqCst) {
                let mut end_lock = self.end.lock().await;
                *end_lock = true;
                return Ok(());
            }
            tokio::task::yield_now().await;
        }
        //Ok(())
    }

    /// Spawn `read_packet` on the tokio runtime as a detached task.
    pub async fn read_packet_thread(
        &mut self,
        in_packet_sender: broadcast::Sender<CameraPacket>,
    ) -> Result<(), CustomError> {
        let mut worker = self.clone();
        tokio::spawn(async move {
            worker.read_packet(in_packet_sender).await.ok();
        });
        Ok(())
    }

    pub async fn sws_get_context(&mut self) -> Result<(), CustomError> {
        self.play_sws_context = unsafe {
            sws_getContext(
                self.width,
                self.height,
                (*self.in_codec_context).pix_fmt,
                self.width,
                self.height,
                AVPixelFormat::AV_PIX_FMT_RGB24,
                SWS_BILINEAR,
                null_mut(),
                null_mut(),
                null_mut(),
            )
        };
        self.file_sws_context = unsafe {
            sws_getContext(
                self.width,
                self.height,
                (*self.in_codec_context).pix_fmt,
                self.width,
                self.height,
                AVPixelFormat::AV_PIX_FMT_YUV420P,
                SWS_BICUBIC,
                null_mut(),
                null_mut(),
                null_mut(),
            )
        };
        Ok(())
    }

    pub async fn init_sdl(&mut self, sdl: Sdl) -> Result<(), CustomError> {
        let video_subsystem = sdl.video().unwrap();
        self.video_subsystem = Some(video_subsystem);
        Ok(())
    }

    pub async fn play_codec(
        &self,
        play_packet_receiver: Arc<Mutex<broadcast::Receiver<CameraPacket>>>,
    ) -> Result<(), CustomError> {
        loop {
            {
                let end_lock = self.end.lock().await;
                if *end_lock {
                    return Ok(());
                }
            }
            let mut camera_packet = {
                let mut play_packet_recevicer_lock = play_packet_receiver.lock().await;
                let camera_packet = play_packet_recevicer_lock.recv().await;

                match camera_packet {
                    Ok(packet) => packet,
                    Err(_) => {
                        return Err(CustomError::FfmepgError("camera packet error".to_owned()))
                    }
                }
            };

            let pkt: *mut AVPacket = unsafe { av_packet_alloc() };
            unsafe {
                camera_packet.to_av_packet(pkt);
            }

            if unsafe { avcodec_send_packet(self.in_codec_context, pkt) } < 0 {
                unsafe { av_packet_unref(pkt) };
                continue;
            }
            unsafe { av_packet_unref(pkt) };

            loop {
                let mut decoder_data: Vec<u8> = Vec::new();
                {
                    let mut frame = unsafe { av_frame_alloc() };
                    let ret = unsafe { avcodec_receive_frame(self.in_codec_context, frame) };
                    //println!("play ret: {}", ret);
                    if ret == AVERROR(EAGAIN) || ret == AVERROR_EOF {
                        unsafe { av_frame_free(&mut frame) };
                        break;
                    } else if ret < 0 {
                        unsafe { av_frame_free(&mut frame) };
                        return Err(CustomError::FfmepgError("Decoding error".to_owned()));
                    }

                    let width = unsafe { (*frame).width };
                    let height = unsafe { (*frame).height };

                    decoder_data.reserve_exact((width * height * 4) as usize);
                    let dest: [*mut u8; 1] = [decoder_data.as_mut_ptr(); 1];
                    let dest_linesize = [width * 3];
                    unsafe {
                        sws_scale(
                            self.play_sws_context,
                            (*frame).data.as_ptr() as *const *const u8,
                            (*frame).linesize.as_ptr() as *const c_int,
                            0,
                            self.height,
                            dest.as_ptr(),
                            dest_linesize.as_ptr(),
                        )
                    };

                    unsafe { av_frame_free(&mut frame) };
                }
                {
                    let _ = self
                        .sender
                        .send(DecoderData::new(decoder_data, self.width, self.height))
                        .await;
                }
            }
        }
        //Ok(())
    }

    /// Spawn `play_codec` as a detached tokio task.
    pub async fn async_codec_thread(
        &mut self,
        play_packet_receiver: Arc<Mutex<broadcast::Receiver<CameraPacket>>>,
    ) -> Result<(), CustomError> {
        let worker = self.clone();
        tokio::spawn(async move {
            worker.play_codec(play_packet_receiver).await.ok();
        });
        Ok(())
    }

    /// Render decoded RGB24 frames in an SDL window until the user quits
    /// (window close / Escape) or the capture task flags `end`.
    ///
    /// NOTE(review): window/canvas/event-pump creation still panics via
    /// `unwrap` on SDL failure, as in the original.
    pub async fn player(&mut self, sdl: Sdl) -> Result<(), CustomError> {
        let window = self
            .video_subsystem
            .as_ref()
            .unwrap()
            .window("Camera", 640, 480)
            .position_centered()
            .opengl()
            .build()
            .map_err(|e| e.to_string())
            .unwrap();

        let mut canvas = window
            .into_canvas()
            .build()
            .map_err(|e| e.to_string())
            .unwrap();
        let texture_creator = canvas.texture_creator();
        let mut texture;

        let mut event_pump = sdl.event_pump().unwrap();

        'running: loop {
            {
                let end_lock = self.end.lock().await;
                if *end_lock {
                    return Ok(());
                }
            }

            let mut receiver_lock = self.recevier.lock().await;
            let (data, width, height) = match receiver_lock.recv().await {
                // BUG FIX: `height` previously copied `decoder_data.width`,
                // so non-square frames were rendered with the wrong geometry.
                Some(decoder_data) => (decoder_data.data, decoder_data.width, decoder_data.height),
                None => continue,
            };
            // Release the channel lock before the (potentially slow) render.
            drop(receiver_lock);

            for event in event_pump.poll_iter() {
                match event {
                    Event::Quit { .. }
                    | Event::KeyDown {
                        keycode: Some(Keycode::Escape),
                        ..
                    } => {
                        break 'running;
                    }
                    _ => {}
                }
            }

            // RGB24 => pitch is 3 bytes per pixel.
            texture = texture_creator
                .create_texture_target(PixelFormatEnum::RGB24, width as u32, height as u32)
                .unwrap();
            texture
                .update(None, data.as_slice(), (width * 3) as usize)
                .expect("update texture error");
            canvas.clear();
            canvas
                .copy(
                    &texture,
                    None,
                    Some(Rect::new(0, 0, width as u32, height as u32)),
                )
                .unwrap();

            canvas.present();
        }

        Ok(())
    }

    /// Allocate an output format context, letting FFmpeg guess the muxer from
    /// the `out_file` extension, and remember the chosen output format.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when allocation fails.
    pub async fn alloc_output_context(&mut self) -> Result<(), CustomError> {
        let ret = unsafe {
            avformat_alloc_output_context2(
                &mut self.out_format_context,
                null_mut(),
                null_mut(),
                self.out_file.as_ptr(),
            )
        };
        if ret < 0 {
            println!("Cannot alloc output file context");
            return Err(CustomError::FfmepgError(
                "Cannot alloc output file context".to_owned(),
            ));
        }
        self.output_format = unsafe { (*self.out_format_context).oformat };
        Ok(())
    }

    pub async fn avio_open(&mut self) -> Result<(), CustomError> {
        let res = unsafe {
            avio_open(
                &mut (*self.out_format_context).pb,
                self.out_file.as_ptr() as *const c_char,
                AVIO_FLAG_READ_WRITE,
            )
        };
        if res < 0 {
            println!("Output file open failed");
            return Err(CustomError::FfmepgError(
                "Output file open failed".to_owned(),
            ));
        }
        Ok(())
    }

    /// Create the video stream in the output container.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when stream creation fails.
    pub async fn new_stream(&mut self) -> Result<(), CustomError> {
        let stream = unsafe { avformat_new_stream(self.out_format_context, self.out_codec) };
        self.out_vstream = stream;
        if stream.is_null() {
            println!("Failed to create output stream");
            return Err(CustomError::FfmepgError(
                "Failed to create output stream".to_owned(),
            ));
        }
        Ok(())
    }

    /// Fetch the codec parameters of the freshly created output stream.
    ///
    /// Looks the stream up again through `out_format_context.streams` by the
    /// index of `out_vstream` (double deref: the array holds *mut AVStream),
    /// then stores its `codecpar` pointer for `set_out_codec_params`.
    pub async fn get_out_codec_parameters(&mut self) -> Result<(), CustomError> {
        self.out_codec_parameters = unsafe {
            (**(*self.out_format_context)
                .streams
                .offset((*self.out_vstream).index as isize))
            .codecpar
        };
        Ok(())
    }

    /// Configure the output stream: 1/30 time base (timestamps in 30ths of a
    /// second) and the video codec parameters (type, codec id, size, bit rate).
    pub async fn set_out_codec_params(&mut self) -> Result<(), CustomError> {
        unsafe {
            (*self.out_vstream).time_base.den = 30;
            (*self.out_vstream).time_base.num = 1;

            (*self.out_codec_parameters).codec_type = AVMediaType::AVMEDIA_TYPE_VIDEO;
            // Use the muxer's default video codec for the chosen container.
            (*self.out_codec_parameters).codec_id = (*self.output_format).video_codec;
            (*self.out_codec_parameters).width = self.width;
            (*self.out_codec_parameters).height = self.height;
            // 110 kbit/s target — quite low for video; presumably intentional
            // for small files. TODO confirm.
            (*self.out_codec_parameters).bit_rate = 110000;
        }
        Ok(())
    }

    /// Find an encoder for the output container's default video codec.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when no matching encoder is registered.
    pub async fn find_encoder(&mut self) -> Result<(), CustomError> {
        let codec = unsafe { avcodec_find_encoder((*self.output_format).video_codec) };
        self.out_codec = codec;
        if codec.is_null() {
            println!("Cannot find any encoder");
            return Err(CustomError::FfmepgError(
                "Cannot find any encoder".to_owned(),
            ));
        }
        Ok(())
    }

    /// Allocate an encoder context for the output codec.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when allocation fails.
    pub async fn out_codec_alloc_context(&mut self) -> Result<(), CustomError> {
        let ctx = unsafe { avcodec_alloc_context3(self.out_codec) };
        self.out_codec_context = ctx;
        if ctx.is_null() {
            println!("Cannot alloc context");
            return Err(CustomError::FfmepgError("Cannot alloc context".to_owned()));
        }
        Ok(())
    }

    pub async fn out_parameters_to_context(&self) -> Result<(), CustomError> {
        unsafe {
            avcodec_parameters_to_context(self.out_codec_context, self.out_codec_parameters);
        }
        Ok(())
    }

    /// Fill the encoder context with output settings: YUV420P frames at
    /// `width` x `height`, 1/30 time base, 110 kbit/s, GOP of 10, plus a few
    /// codec-specific tweaks for H.264 / MPEG-2 / MPEG-1.
    pub async fn set_out_codec_context_params(&mut self) -> Result<(), CustomError> {
        unsafe {
            (*self.out_codec_context).codec_id = (*self.output_format).video_codec;
            (*self.out_codec_context).codec_type = AVMediaType::AVMEDIA_TYPE_VIDEO;
            (*self.out_codec_context).pix_fmt = AVPixelFormat::AV_PIX_FMT_YUV420P;
            (*self.out_codec_context).width = self.width;
            (*self.out_codec_context).height = self.height;
            (*self.out_codec_context).time_base.num = 1;
            (*self.out_codec_context).time_base.den = 30;
            (*self.out_codec_context).bit_rate = 110000;
            // One keyframe at most every 10 frames.
            (*self.out_codec_context).gop_size = 10;
            // H.264: quantizer range and rate-control compression factor.
            if (*self.out_codec_context).codec_id == AVCodecID::AV_CODEC_ID_H264 {
                (*self.out_codec_context).qmin = 10;
                (*self.out_codec_context).qmax = 51;
                (*self.out_codec_context).qcompress = 0.6;
            }
            if (*self.out_codec_context).codec_id == AVCodecID::AV_CODEC_ID_MPEG2VIDEO {
                (*self.out_codec_context).max_b_frames = 2;
            }
            if (*self.out_codec_context).codec_id == AVCodecID::AV_CODEC_ID_MPEG1VIDEO {
                (*self.out_codec_context).mb_decision = 2;
            }
        }
        Ok(())
    }

    /// Open the encoder with the configured output codec context.
    ///
    /// # Errors
    /// `CustomError::FfmepgError` when `avcodec_open2` fails.
    pub async fn codec_open(&mut self) -> Result<(), CustomError> {
        if unsafe { avcodec_open2(self.out_codec_context, self.out_codec, null_mut()) } < 0 {
            println!("Open encoder failed");
            return Err(CustomError::FfmepgError("Open encoder failed".to_owned()));
        }
        Ok(())
    }

    /// Print a human-readable summary of the output container to stderr
    /// (the final 1 selects the output side of `av_dump_format`).
    pub async fn out_dump_format(&self) -> Result<(), CustomError> {
        let file = self.out_file.as_ptr();
        unsafe { av_dump_format(self.out_format_context, 0, file, 1) };
        Ok(())
    }

    pub async fn write_header(&self) -> Result<(), CustomError> {
        unsafe {
            avformat_write_header(self.out_format_context, null_mut());
        }
        Ok(())
    }

    /// Decode broadcast camera packets, convert them to YUV420P, re-encode
    /// them with the output codec and mux them into the output file.
    ///
    /// Runs until the capture task sets `end` (returns Ok) or a decode /
    /// channel error occurs.
    ///
    /// NOTE(review): several FFmpeg allocations here are only `unref`'d, never
    /// freed — `in_pkt`/`out_pkt` (need `av_packet_free`) and the per-frame
    /// `out_buffer` from `av_malloc` (needs `av_free`) — so memory leaks on
    /// every frame. Confirm and fix.
    pub async fn write_data(
        &self,
        file_packet_receiver: Arc<Mutex<broadcast::Receiver<CameraPacket>>>,
    ) -> Result<(), CustomError> {
        // Counter used to synthesize PTS values for packets that lack one.
        let mut frame_index = 0;
        loop {
            {
                // Stop when the reader task has flagged shutdown.
                let end_lock = self.end.lock().await;
                if *end_lock {
                    return Ok(());
                }
            }
            let mut camera_packet = {
                let mut file_packet_recevicer_lock = file_packet_receiver.lock().await;
                let camera_packet = file_packet_recevicer_lock.recv().await;

                match camera_packet {
                    Ok(packet) => packet.clone(),
                    Err(_) => {
                        return Err(CustomError::FfmepgError("camera packet error".to_owned()))
                    }
                }
            };

            // Rebuild an AVPacket from the broadcast payload.
            let in_pkt: *mut AVPacket = unsafe { av_packet_alloc() };
            unsafe {
                camera_packet.to_av_packet(in_pkt);
            }

            // Feed the raw camera packet to the *input* decoder.
            if unsafe { avcodec_send_packet(self.in_codec_context, in_pkt) } < 0 {
                unsafe { av_packet_unref(in_pkt) };
                continue;
            }

            let mut src_frame = unsafe { av_frame_alloc() };
            // Drain all frames the decoder produced for this packet.
            loop {
                let ret = unsafe { avcodec_receive_frame(self.in_codec_context, src_frame) };
                if ret == AVERROR(EAGAIN) || ret == AVERROR_EOF {
                    // Freed again after the loop; the second call is a no-op
                    // only because av_frame_free nulls the pointer it is given.
                    unsafe { av_frame_free(&mut src_frame) };
                    break;
                } else if ret < 0 {
                    unsafe { av_frame_free(&mut src_frame) };
                    return Err(CustomError::FfmepgError("Decoding error".to_owned()));
                }

                // Byte size of one YUV420P frame at the configured dimensions.
                let num_bytes = unsafe {
                    av_image_get_buffer_size(
                        AVPixelFormat::AV_PIX_FMT_YUV420P,
                        self.width,
                        self.height,
                        1,
                    )
                };

                // NOTE(review): never released with av_free — leaked per frame.
                let out_buffer = unsafe { av_malloc(num_bytes as usize) as *const u8 };

                let mut yuv_frame = unsafe { av_frame_alloc() };

                // Point the frame's data/linesize arrays into out_buffer.
                let _ret = unsafe {
                    av_image_fill_arrays(
                        (*yuv_frame).data.as_ptr() as *mut *mut u8,
                        (*yuv_frame).linesize.as_ptr() as *mut c_int,
                        out_buffer,
                        AVPixelFormat::AV_PIX_FMT_YUV420P,
                        self.width,
                        self.height,
                        1,
                    )
                };
                unsafe {
                    (*yuv_frame).format = 0; //AVPixelFormat::AV_PIX_FMT_YUV420P
                    (*yuv_frame).width = self.width;
                    (*yuv_frame).height = self.height;
                    // Carry the decoder's timestamp through to the encoder.
                    (*yuv_frame).pts = (*src_frame).pts;
                }

                // Convert the decoded frame to YUV420P for the encoder.
                let _ret = unsafe {
                    sws_scale(
                        self.file_sws_context,
                        (*src_frame).data.as_ptr() as *const *const u8,
                        (*src_frame).linesize.as_ptr() as *const c_int,
                        0,
                        self.height,
                        (*yuv_frame).data.as_ptr() as *const *mut u8,
                        (*yuv_frame).linesize.as_ptr() as *const c_int,
                    )
                };

                let ret = unsafe { avcodec_send_frame(self.out_codec_context, yuv_frame) };

                if ret >= 0 {
                    // NOTE(review): only one receive_packet per sent frame —
                    // if the encoder buffers and later emits several packets,
                    // the extras are never drained.
                    let out_pkt = unsafe { av_packet_alloc() };
                    let ret = unsafe { avcodec_receive_packet(self.out_codec_context, out_pkt) };
                    if ret >= 0 {
                        unsafe {
                            (*out_pkt).stream_index = (*self.out_vstream).index;

                            // NOTE(review): av_q2d(time_base) as i32 truncates
                            // to 0 for sub-second time bases (e.g. 1/1000000),
                            // which would divide by zero below — verify.
                            if (*out_pkt).pts == AV_NOPTS_VALUE {
                                println!("frame_index: {}", frame_index);
                                //Write PTS
                                let time_base1 = (**self.in_av_stream).time_base;
                                //Duration between 2 frames (us)
                                let calc_duration = AV_TIME_BASE
                                    / av_q2d((**self.in_av_stream).r_frame_rate) as i32;
                                //Parameters
                                (*out_pkt).pts = ((frame_index * calc_duration)
                                    / (av_q2d(time_base1) as i32 * AV_TIME_BASE))
                                    as i64;
                                (*out_pkt).dts = (*out_pkt).pts;
                                (*out_pkt).duration = (calc_duration
                                    / (av_q2d(time_base1) as i32 * AV_TIME_BASE))
                                    as i64;
                                frame_index += 1;
                            }
                            //Convert PTS/DTS
                            // Rescale timestamps from the input stream's time
                            // base to the output stream's (1/30) time base.
                            (*out_pkt).pts = av_rescale_q_rnd(
                                (*out_pkt).pts,
                                (**self.in_av_stream).time_base,
                                (*self.out_vstream).time_base,
                                ffmpeg_sys_next::AVRounding::AV_ROUND_PASS_MINMAX,
                            );
                            (*out_pkt).dts = av_rescale_q_rnd(
                                (*out_pkt).dts,
                                (**self.in_av_stream).time_base,
                                (*self.out_vstream).time_base,
                                ffmpeg_sys_next::AVRounding::AV_ROUND_PASS_MINMAX,
                            );
                            (*out_pkt).duration = av_rescale_q(
                                (*out_pkt).duration,
                                (**self.in_av_stream).time_base,
                                (*self.out_vstream).time_base,
                            );

                            // Let the muxer compute the byte position.
                            (*out_pkt).pos = -1;
                            av_interleaved_write_frame(self.out_format_context, out_pkt);
                        }
                    }

                    unsafe {
                        av_packet_unref(out_pkt);
                    }
                }

                unsafe {
                    av_frame_free(&mut yuv_frame);
                }
            }
            unsafe {
                av_frame_free(&mut src_frame);
            }
            unsafe {
                av_packet_unref(in_pkt);
            }
        }
        //Ok(())
    }

    pub async fn write_trailer(&self) -> Result<(), CustomError> {
        unsafe {
            av_write_trailer(self.out_format_context);
        }
        Ok(())
    }

    /// Spawn `write_data` as a detached tokio task.
    pub async fn write_data_thread(
        &self,
        file_packet_receiver: Arc<Mutex<broadcast::Receiver<CameraPacket>>>,
    ) -> Result<(), CustomError> {
        let worker = self.clone();
        tokio::spawn(async move {
            worker.write_data(file_packet_receiver).await.ok();
        });
        Ok(())
    }

    /// Free the input and output format contexts.
    ///
    /// NOTE(review): the input was opened with `avformat_open_input`, which
    /// should normally be torn down with `avformat_close_input` (see the
    /// commented-out line) rather than `avformat_free_context` alone —
    /// confirm this does not leak demuxer-internal resources.
    pub async fn close(&self) -> Result<(), CustomError> {
        unsafe {
            println!("end");
            //avformat_close_input(&mut &mut *self.input_format);
            avformat_free_context(self.in_format_context);
            avformat_free_context(self.out_format_context);
        }
        Ok(())
    }
}
