use std::sync::mpsc::Sender;
use std::sync::{mpsc, MutexGuard, RwLock};
use std::sync::{Arc, Mutex};
use std::time::Instant;
// An mpsc channel may have multiple senders but only one receiver.
use std::{panic, thread};

use std::time::Duration;

use sdl2::audio::{AudioCallback, AudioDevice, AudioFormat, AudioSpec, AudioSpecDesired};
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::pixels::Color;
use sdl2::pixels::PixelFormatEnum;
use sdl2::rect::Rect;

use crate::video::{load_video, AudioContext, DecoderData, FramePacket, VideoContext};

use crate::mqueue::Queue;

use crate::{gl_renderer, mqueue, video};
use cgmath::Matrix;
use sdl2::libc::{exit, strlen};
use sdl2::render::{Texture, WindowCanvas};
use sdl2::video::{GLProfile, WindowPos};
use sdl2::{EventPump, Sdl};
use std::env;
use std::ops::DerefMut;
use std::ptr::null_mut;
use std::thread::Builder;

const MAX_QUEUE_SIZE: usize = 1000;

/// State for the SDL audio callback: a queue of decoded i16 sample buffers
/// shared with the audio feed thread (`player_audio`).
///
/// NOTE: the name is a leftover from the sdl2 square-wave example — this
/// plays decoded audio, not a square wave.
#[derive(Debug)]
pub struct SquareWave {
    // Each Vec<i16> is one ready-to-play buffer of interleaved samples.
    pub data: Arc<Mutex<mqueue::Queue<Vec<i16>>>>,
}

impl AudioCallback for SquareWave {
    type Channel = i16;

    /// SDL audio callback: pops one decoded buffer from the shared queue and
    /// copies it into `out`.
    ///
    /// Fixes two defects in the original version:
    /// - `copy_from_slice` panics when the popped buffer and `out` differ in
    ///   length; we now copy `min(len)` samples instead.
    /// - When the queue is empty (or the buffer is short) the rest of `out`
    ///   is zero-filled so SDL plays silence rather than stale samples.
    fn callback(&mut self, out: &mut [i16]) {
        let mut buffer = self.data.lock().unwrap();
        if buffer.len() > 0 {
            let samples = buffer.pop().unwrap();
            let n = samples.len().min(out.len());
            out[..n].copy_from_slice(&samples[..n]);
            // Silence any tail the decoded buffer did not cover.
            out[n..].fill(0);
        } else {
            out.fill(0);
        }
    }
}

/// Converts an FFmpeg rational to a floating-point value.
///
/// Returns 0.0 when the denominator is zero. The division is performed in
/// floating point; the original `(r.num / r.den) as f64` did integer
/// division first, truncating every non-integral ratio (e.g. 1/25 → 0.0).
fn r2d(r: ffmpeg_sys_next::AVRational) -> f64 {
    if r.den == 0 {
        return 0.0;
    }
    r.num as f64 / r.den as f64
}

/// Flag set by the demux thread once `read_packet` fails / reaches end of file.
pub struct ReadFrameStatus {
    pub end: bool,
}

/// Shared audio clock: presentation time (seconds) of the most recently
/// queued audio buffer. Starts at 0.0; set to -1.0 at end of stream.
pub struct AudioTime {
    pub time: f64,
}

/// Shared play/pause flag (true = playing), toggled by the Space key;
/// worker threads spin while it is false.
pub struct PlayerStatus {
    pub status: bool,
}

/// Top-level media player: owns the decode contexts, the SDL handles, and
/// the queues shared with the demux / decoder / audio threads.
pub struct Player {
    pub video_ctx: VideoContext,
    pub audio_ctx: AudioContext,
    // Path of the media file being played.
    pub path: String,
    // Initial window position.
    pub position_x: i32,
    pub position_y: i32,
    // Render size (falls back to the stream's native size when -1 is passed).
    pub width: i32,
    pub height: i32,
    // Decoded video frames awaiting display.
    pub video_queue: Arc<Mutex<Queue<DecoderData>>>,
    // Decoded audio frames awaiting conversion to i16 samples.
    pub audio_queue: Arc<Mutex<Queue<DecoderData>>>,
    // Demuxed, undecoded packets, one queue per stream.
    pub video_packet_queue: Arc<Mutex<Queue<FramePacket>>>,
    pub audio_packet_queue: Arc<Mutex<Queue<FramePacket>>>,
    // Shared play/pause flag.
    pub player_status: Arc<Mutex<PlayerStatus>>,
    // Sample buffers consumed by the SDL audio callback (`SquareWave`).
    pub audio_callback_data: Arc<Mutex<Queue<Vec<i16>>>>,
    // Audio clock used to pace video presentation.
    pub audio_time: Arc<RwLock<AudioTime>>,
    // Set once the demux thread reaches end of file.
    pub read_packet_end: Arc<Mutex<ReadFrameStatus>>,
    pub sdl_context: Sdl,
    pub audio_device: AudioDevice<SquareWave>,
}

/// Opens the SDL playback device with the format described by `audio_ctx`
/// (output sample rate, channel count, 1024-sample buffer) and wires the
/// shared sample queue into the audio callback.
///
/// Panics if the SDL audio subsystem or device cannot be opened.
pub unsafe fn sdl_audio_init(
    sdl_context: &mut Sdl,
    audio_ctx: &mut AudioContext,
    audio_callback_data: Arc<Mutex<Queue<Vec<i16>>>>,
) -> AudioDevice<SquareWave> {
    let desired_spec = AudioSpecDesired {
        freq: Some(audio_ctx.out_sample_rate),
        channels: Some(audio_ctx.channels as u8),
        samples: Some(1024),
    };

    let audio_subsystem = sdl_context.audio().unwrap();

    // The closure receives the actual negotiated AudioSpec; it is unused
    // because the callback only drains the shared queue.
    audio_subsystem
        .open_playback(None, &desired_spec, |_spec| SquareWave {
            data: audio_callback_data,
        })
        .unwrap()
}

impl Player {
    pub unsafe fn new(
        path: &str,
        position_x: i32,
        position_y: i32,
        width: i32,
        height: i32,
        nvidia_decoder: bool,
    ) -> Self {
        let mut video_ctx = VideoContext::new();
        let mut audio_ctx = AudioContext::new();
        video_ctx.isNvidiaDecoder = nvidia_decoder;
        let mut audio_callback_data = Arc::new(Mutex::new((Queue::new())));
        load_video(&mut video_ctx, &mut audio_ctx, path);
        let mut sdl_context = sdl2::init().unwrap();
        let device = sdl_audio_init(
            &mut sdl_context,
            &mut audio_ctx,
            audio_callback_data.clone(),
        );
        let mut render_width = width;
        let mut render_height = height;
        if render_width == -1 || render_height == -1 {
            render_width = video_ctx.width;
            render_height = video_ctx.height;
        }
        return Self {
            video_ctx: video_ctx,
            audio_ctx: audio_ctx,
            path: path.to_string(),
            position_x: position_x,
            position_y: position_y,
            video_queue: Arc::new(Mutex::new((Queue::new()))),
            audio_queue: Arc::new(Mutex::new((Queue::new()))),
            video_packet_queue: Arc::new(Mutex::new((Queue::new()))),
            audio_packet_queue: Arc::new(Mutex::new((Queue::new()))),
            player_status: Arc::new(Mutex::new(PlayerStatus { status: true })),
            audio_callback_data: audio_callback_data,
            audio_time: Arc::new(RwLock::new(AudioTime { time: 0.0 })),
            read_packet_end: Arc::new(Mutex::new(ReadFrameStatus { end: false })),
            sdl_context: sdl_context,
            audio_device: device,
            width: render_width,
            height: render_height,
        };
    }

    /// Pauses the audio device while the callback queue is empty and resumes
    /// it once samples are available again.
    pub unsafe fn check_audio(&mut self) {
        let queue = self.audio_callback_data.lock().unwrap();
        if queue.len() == 0 {
            self.audio_device.pause();
        } else {
            self.audio_device.resume();
        }
    }

    /// Paces video presentation against the audio clock.
    ///
    /// `video_sec` is the current frame's presentation time in seconds and
    /// `now` marks when frame processing started. The nominal inter-frame
    /// delay (1000 / fps, milliseconds) is adjusted by the video-minus-audio
    /// clock difference, clamped at zero, and slept.
    pub unsafe fn async_audio(&mut self, video_sec: f64, now: Instant) {
        let mut d_time = 0.0;
        // NOTE(review): `1000 / fps` looks like integer division (e.g. 24 fps
        // → 41 ms instead of 41.67 ms), a small systematic drift — confirm
        // the type of `fps`.
        let mut distance = (1000 / self.video_ctx.fps) as i64 - now.elapsed().as_millis() as i64;
        {
            // Audio clock <= 0.0 means audio has not started (or has ended);
            // in that case pace by fps alone.
            let audio_sec = self.audio_time.read().unwrap();
            if audio_sec.time > 0.0 {
                d_time = video_sec - audio_sec.time;
            }
        }

        // Positive d_time: video is ahead of audio, wait longer; negative:
        // video is behind, shorten the sleep to catch up.
        distance = distance + (d_time * 1000.0) as i64;
        // println!("{}", distance);
        if distance < 0 {
            distance = 0;
        }

        thread::sleep(Duration::from_millis(distance as u64));
    }

    /// OpenGL render path (used for YUV444P): uploads the three Y/U/V planes
    /// as single-channel textures each frame and draws a textured quad.
    ///
    /// Runs the event loop on the calling thread until the window is closed,
    /// Escape is pressed, or the stream ends; Space toggles pause.
    pub unsafe fn opengl_display(&mut self) {
        println!("opengl_display");
        let video_subsystem = self.sdl_context.video().unwrap();

        // Request a core-profile 3.3 context before creating the window.
        let gl_attr = video_subsystem.gl_attr();
        gl_attr.set_context_profile(GLProfile::Core);
        gl_attr.set_context_version(3, 3);
        let width = self.width;
        let height = self.height;
        let window = video_subsystem
            .window("opengl", width as u32, height as u32)
            .position(self.position_x, self.position_y)
            .opengl()
            .build()
            .unwrap();

        // Unlike the other example above, nobody created a context for your window, so you need to create one.
        let ctx = window.gl_create_context().unwrap();
        gl::load_with(|name| video_subsystem.gl_get_proc_address(name) as *const _);

        debug_assert_eq!(gl_attr.context_profile(), GLProfile::Core);
        debug_assert_eq!(gl_attr.context_version(), (3, 3));

        let mut event_pump = self.sdl_context.event_pump().unwrap();
        let shader_program = gl_renderer::create_shader(
            gl_renderer::VERTEX_SHADER_SOURCE,
            gl_renderer::FRAGMENT_SHADER_SOURCE,
        );
        let (vbo, vao, ebo) = gl_renderer::create_geometry();
        // One texture object per YUV plane.
        let mut y_texture = 0;
        let mut u_texture = 1;
        let mut v_texture = 2;
        gl_renderer::generate_texture(&mut y_texture);
        gl_renderer::generate_texture(&mut u_texture);
        gl_renderer::generate_texture(&mut v_texture);
        // Initial (empty) backing storage for the three planes; in YUV444
        // every plane is width * height bytes.
        let mut y_data: Vec<u8> = Vec::new();
        y_data.reserve_exact((width * height) as usize);
        let mut u_data: Vec<u8> = Vec::new();
        u_data.reserve_exact((width * height) as usize);
        let mut v_data: Vec<u8> = Vec::new();
        v_data.reserve_exact((width * height) as usize);

        // Single-channel (gl::RED) textures; the shader recombines Y/U/V.
        gl_renderer::create_texture(
            y_texture,
            width as i32,
            height as i32,
            gl::RED,
            gl::UNSIGNED_BYTE,
            gl::RED as i32,
            y_data.as_ptr() as *const std::ffi::c_void,
        );
        gl_renderer::create_texture(
            u_texture,
            width as i32,
            height as i32,
            gl::RED,
            gl::UNSIGNED_BYTE,
            gl::RED as i32,
            u_data.as_ptr() as *const std::ffi::c_void,
        );
        gl_renderer::create_texture(
            v_texture,
            width as i32,
            height as i32,
            gl::RED,
            gl::UNSIGNED_BYTE,
            gl::RED as i32,
            v_data.as_ptr() as *const std::ffi::c_void,
        );
        let uniform_name = std::ffi::CString::new("projection").unwrap();
        let uniform_y_texture_name = std::ffi::CString::new("y_texture").unwrap();
        let uniform_u_texture_name = std::ffi::CString::new("u_texture").unwrap();
        let uniform_v_texture_name = std::ffi::CString::new("v_texture").unwrap();

        // Orthographic projection (top-left origin) combined with a model
        // matrix that centers and scales a unit quad to the full window.
        let mut projection_matrix = cgmath::ortho(0.0, width as f32, height as f32, 0.0, -1.0, 1.0);
        let mut model = cgmath::Matrix4::from_translation(cgmath::vec3(0.0, 0.0, 0.0));
        model = model
            * cgmath::Matrix4::from_translation(cgmath::vec3(
                0.5 * (width as f32),
                0.5 * (height as f32),
                0.0,
            ));
        model =
            model * cgmath::Matrix4::from_nonuniform_scale(1.0 * width as f32, height as f32, 1.0);
        projection_matrix = projection_matrix * model;

        gl::UseProgram(shader_program);
        gl::BindVertexArray(vao);

        // Resolve uniform locations once; the projection never changes.
        let vertex_proyection_location =
            gl::GetUniformLocation(shader_program, uniform_name.as_ptr());
        let y_texture_location =
            gl::GetUniformLocation(shader_program, uniform_y_texture_name.as_ptr());
        let u_texture_location =
            gl::GetUniformLocation(shader_program, uniform_u_texture_name.as_ptr());
        let v_texture_location =
            gl::GetUniformLocation(shader_program, uniform_v_texture_name.as_ptr());
        gl::UniformMatrix4fv(
            vertex_proyection_location,
            1,
            gl::FALSE,
            projection_matrix.as_ptr(),
        );

        let mut play_status = true;
        let mut video_status = false;

        'running: loop {
            for event in event_pump.poll_iter() {
                match event {
                    // Window close or Escape ends playback.
                    Event::Quit { .. }
                    | Event::KeyDown {
                        keycode: Some(Keycode::Escape),
                        ..
                    } => {
                        break 'running;
                    }
                    // Space toggles the shared pause flag and the audio device.
                    Event::KeyDown {
                        keycode: Some(Keycode::Space),
                        ..
                    } => {
                        let mut status = self.player_status.lock().unwrap();
                        play_status = !status.status;
                        if play_status {
                            status.status = play_status;
                            self.audio_device.resume();
                        } else {
                            status.status = play_status;
                            self.audio_device.pause();
                        }
                    }
                    _ => {}
                }
            }
            // Paused: keep pumping events, skip rendering.
            if !play_status {
                continue;
            }

            let now = Instant::now();

            let (frame_status, frame_decoder_data, video_sec, end, decoder_data) =
                self.get_video_frame();
            video_status = end;
            if frame_status {
                self.check_audio();

                gl::Clear(gl::COLOR_BUFFER_BIT);

                // Upload each decoded plane into its texture unit.
                gl_renderer::tex_sub_image_2d(
                    gl::TEXTURE0,
                    y_texture,
                    width as i32,
                    height as i32,
                    decoder_data.data[0].as_ptr(),
                    y_texture_location,
                    0,
                );

                gl_renderer::tex_sub_image_2d(
                    gl::TEXTURE1,
                    u_texture,
                    width as i32,
                    height as i32,
                    decoder_data.data[1].as_ptr(),
                    u_texture_location,
                    1,
                );

                gl_renderer::tex_sub_image_2d(
                    gl::TEXTURE2,
                    v_texture,
                    width as i32,
                    height as i32,
                    decoder_data.data[2].as_ptr(),
                    v_texture_location,
                    2,
                );

                // Two triangles (6 indices) forming the full-window quad.
                gl::DrawElements(gl::TRIANGLES, 6, gl::UNSIGNED_INT, std::ptr::null());

                window.gl_swap_window();

                // Pace against the audio clock before the next frame.
                self.async_audio(video_sec, now);
            }

            if video_status {
                break 'running;
            }
        }
    }

    /// SDL software-render path: each decoded frame is converted to RGB24 by
    /// `get_video_frame` and blitted through a streaming texture.
    ///
    /// Runs the event loop until the window closes, Escape is pressed, or
    /// the stream ends; Space toggles pause and the audio device with it.
    pub unsafe fn sdl_display(&mut self) {
        // Renderer initialization.
        let video_subsystem = self.sdl_context.video().unwrap();
        let mut window = video_subsystem
            .window("sdl2 player", self.width as u32, self.height as u32)
            .position(self.position_x, self.position_y)
            // .position_centered()
            .opengl()
            .build()
            .map_err(|e| e.to_string())
            .unwrap();

        let mut canvas = window
            .into_canvas()
            .build()
            .map_err(|e| e.to_string())
            .unwrap();
        let texture_creator = canvas.texture_creator();
        // Streaming texture the packed RGB24 frame bytes are uploaded into.
        let mut texture = texture_creator
            .create_texture_streaming(
                PixelFormatEnum::RGB24,
                self.width as u32,
                self.height as u32,
            )
            .map_err(|e| e.to_string())
            .unwrap();

        let mut event_pump = self.sdl_context.event_pump().unwrap();

        let mut play_status = true;
        let mut video_status = false;

        self.audio_device.resume();

        'running: loop {
            for event in event_pump.poll_iter() {
                match event {
                    // Window close or Escape ends playback.
                    Event::Quit { .. }
                    | Event::KeyDown {
                        keycode: Some(Keycode::Escape),
                        ..
                    } => {
                        break 'running;
                    }
                    // Space toggles the shared pause flag and the audio device.
                    Event::KeyDown {
                        keycode: Some(Keycode::Space),
                        ..
                    } => {
                        let mut status = self.player_status.lock().unwrap();
                        play_status = !status.status;
                        if play_status {
                            status.status = play_status;
                            self.audio_device.resume();
                        } else {
                            status.status = play_status;
                            self.audio_device.pause();
                        }
                    }
                    _ => {}
                }
            }
            // Paused: keep pumping events, skip rendering.
            if !play_status {
                continue;
            }

            let now = Instant::now();

            let (frame_status, frame_decoder_data, video_sec, end, decoder_data) =
                self.get_video_frame();
            video_status = end;
            if frame_status {
                //self.check_audio();

                // Upload the RGB24 bytes, pitch = width * 3 bytes per row.
                // NOTE(review): `get_video_frame` never sets the Vec's length
                // after sws_scale writes through the raw pointer, so this
                // slice may be empty — verify what `update` receives here.
                texture
                    .update(
                        None,
                        frame_decoder_data.as_slice(),
                        (self.width * 3) as usize,
                    )
                    .expect("update texture error");

                video_status = end;
                canvas.clear();
                canvas
                    .copy(
                        &texture,
                        None,
                        Some(Rect::new(0, 0, self.width as u32, self.height as u32)),
                    )
                    .unwrap();

                canvas.present();
                // Pace against the audio clock before the next frame.
                self.async_audio(video_sec, now);
            }

            if video_status {
                break 'running;
            }
        }
    }

    /// Starts playback: spawns the demux and decoder threads (plus the audio
    /// pipeline when the file has an audio stream), then runs the render
    /// loop on the calling thread — OpenGL for YUV444P, SDL otherwise.
    pub unsafe fn player(&mut self) {
        self.read_video_thread();
        self.video_decoder_thread();
        // -1 means the container has no audio stream.
        if self.audio_ctx.audio_stream_index != -1 {
            self.audio_decoder_thread();
            self.player_audio();
        }
        match self.video_ctx.format {
            ffmpeg_sys_next::AVPixelFormat::AV_PIX_FMT_YUV444P => self.opengl_display(),
            _ => self.sdl_display(),
        }
    }

    /// Clones every piece of shared state a worker thread needs, so the
    /// thread can own its handles independently of `self`.
    pub fn clone_thread_data(
        &mut self,
    ) -> (
        Arc<Mutex<PlayerStatus>>,
        Arc<Mutex<Queue<FramePacket>>>,
        Arc<Mutex<Queue<FramePacket>>>,
        Arc<Mutex<Queue<DecoderData>>>,
        Arc<Mutex<Queue<DecoderData>>>,
        VideoContext,
        AudioContext,
        Arc<RwLock<AudioTime>>,
        Arc<Mutex<Queue<Vec<i16>>>>,
        Arc<Mutex<ReadFrameStatus>>,
    ) {
        (
            self.player_status.clone(),
            self.video_packet_queue.clone(),
            self.audio_packet_queue.clone(),
            self.video_queue.clone(),
            self.audio_queue.clone(),
            self.video_ctx.clone(),
            self.audio_ctx.clone(),
            self.audio_time.clone(),
            self.audio_callback_data.clone(),
            self.read_packet_end.clone(),
        )
    }

    /// Demux thread: reads packets from the container and routes each one to
    /// the video or audio packet queue by stream index, until end of file.
    pub unsafe fn read_video_thread(&mut self) {
        let (
            player_status,
            video_packet_queue,
            audio_packet_queue,
            video_queue,
            audio_queue,
            mut video_ctx,
            mut audio_ctx,
            audio_time,
            audio_callback_data,
            read_packet_end,
        ) = self.clone_thread_data();

        Builder::new()
            .name("read_video_thread".to_string())
            .spawn(move || loop {
                {
                    // While paused, spin without reading.
                    // NOTE(review): this busy-waits (and re-takes the status
                    // lock every iteration); a condvar would be gentler on
                    // the CPU.
                    let player_status = player_status.lock().unwrap();
                    if player_status.status == false {
                        continue;
                    }
                    {
                        // Back-pressure: stop reading while either packet
                        // queue is at capacity.
                        let mut video_packet_queue = video_packet_queue.lock().unwrap();
                        let mut audio_packet_queue = audio_packet_queue.lock().unwrap();
                        if audio_packet_queue.len() >= MAX_QUEUE_SIZE
                            || video_packet_queue.len() >= MAX_QUEUE_SIZE
                        {
                            continue;
                        }
                    }

                    let mut packet = FramePacket::new();
                    let mut data: Vec<u8> = Vec::new();
                    let status =
                        video::read_packet(&mut video_ctx, &mut audio_ctx, &mut packet, &mut data);

                    if status {
                        // Route the packet to the queue matching its stream.
                        let video_packet = packet.clone();
                        if video_packet.stream_index == video_ctx.video_stream_index {
                            {
                                video_packet_queue.lock().unwrap().push(video_packet);
                            }
                        }
                        let audio_packet = packet.clone();
                        if audio_packet.stream_index == audio_ctx.audio_stream_index {
                            {
                                audio_packet_queue.lock().unwrap().push(audio_packet);
                            }
                        }
                    } else {
                        // read_packet failed / EOF: flag the end for the
                        // decoder threads and exit this thread.
                        {
                            println!("read video packet end");
                            let mut read_packet_end = read_packet_end.lock().unwrap();
                            read_packet_end.end = true;
                        }
                        break;
                    }
                }
            })
            .unwrap();
    }

    /// Video decode thread: pops packets from the video packet queue, decodes
    /// them, and pushes decoded frames onto the video frame queue.
    pub unsafe fn video_decoder_thread(&mut self) {
        let (
            player_status,
            video_packet_queue,
            audio_packet_queue,
            video_queue,
            audio_queue,
            mut video_ctx,
            mut audio_ctx,
            audio_time,
            audio_callback_data,
            read_packet_end,
        ) = self.clone_thread_data();
        Builder::new()
            .name("video_decoder_thread".to_string())
            .spawn(move || {
                loop {
                    {
                        // While paused, spin without decoding.
                        let player_status = player_status.lock().unwrap();
                        if player_status.status == false {
                            continue;
                        }
                    }
                    let mut decoder_data = video::DecoderData::new();
                    let now = Instant::now();
                    {
                        // Back-pressure: wait while the decoded-frame queue is full.
                        let mut video_queue = video_queue.lock().unwrap();
                        if video_queue.len() >= MAX_QUEUE_SIZE {
                            continue;
                        }
                    }

                    let mut video_packet_len = 0;
                    let mut tmp_video_packet = FramePacket::new();
                    {
                        // Take one packet and remember how many remain after
                        // the pop (used below for the end-of-stream check).
                        let mut video_packet = video_packet_queue.lock().unwrap();
                        if video_packet.len() > 0 {
                            tmp_video_packet = video_packet.pop().unwrap().clone();
                        }
                        video_packet_len = video_packet.len();
                    }

                    // stream_index == -1 means no packet was available this round.
                    if tmp_video_packet.stream_index != -1 {
                        let decoder_status = video::decoder_frame(
                            &mut video_ctx,
                            &mut audio_ctx,
                            &mut decoder_data,
                            &mut tmp_video_packet,
                        );
                        if !decoder_status {
                            continue;
                        }
                        // Presentation time in seconds: pts * time_base.
                        let pt_in_sec = decoder_data.pts as f64 * video_ctx.time_base.num as f64
                            / video_ctx.time_base.den as f64;
                        {
                            let mut video_queue = video_queue.lock().unwrap();
                            // if f64::ceil((*video_ctx.format_context).duration as f64 / 1000000.0) == f64::ceil(pt_in_sec)
                            // {
                            //     decoder_data.end = true;
                            //     video_queue.push(decoder_data);
                            //     break;
                            // }
                            // NOTE(review): this exits the thread whenever the
                            // packet queue happened to be empty after the pop,
                            // even when demuxing has not finished (end is only
                            // *marked* if read_packet_end.end is set) — verify
                            // the reader always stays ahead of the decoder.
                            if video_packet_len == 0 {
                                let read_packet_end = read_packet_end.lock().unwrap();
                                if read_packet_end.end {
                                    // Demuxing finished: tag this as the last frame.
                                    decoder_data.end = true;
                                }
                                video_queue.push(decoder_data);
                                break;
                            }
                            video_queue.push(decoder_data);
                        }
                    }
                }
            })
            .expect("video_decoder_thread");
    }

    /// Audio decode thread: pops packets from the audio packet queue, decodes
    /// them, and pushes decoded audio frames onto the audio queue.
    pub unsafe fn audio_decoder_thread(&mut self) {
        let (
            player_status,
            video_packet_queue,
            audio_packet_queue,
            video_queue,
            audio_queue,
            mut video_ctx,
            mut audio_ctx,
            audio_time,
            audio_callback_data,
            read_packet_end,
        ) = self.clone_thread_data();
        Builder::new()
            .name("audio_decoder_thread".to_string())
            .spawn(move || {
                loop {
                    {
                        // While paused, spin without decoding.
                        let player_status = player_status.lock().unwrap();
                        if player_status.status == false {
                            continue;
                        }
                    }

                    let mut decoder_data = video::DecoderData::new();
                    let now = Instant::now();
                    {
                        // Back-pressure: wait while the decoded-audio queue is full.
                        let mut audio_queue = audio_queue.lock().unwrap();

                        if audio_queue.len() >= MAX_QUEUE_SIZE {
                            continue;
                        }
                    }

                    // NOTE(review): this lock is held across the decode call
                    // below, blocking the demux thread from pushing packets
                    // for the duration of each decode.
                    let mut audio_packet = audio_packet_queue.lock().unwrap();
                    if audio_packet.len() > 0 {
                        let decoder_status = video::decoder_frame(
                            &mut video_ctx,
                            &mut audio_ctx,
                            &mut decoder_data,
                            &mut audio_packet.pop().unwrap(),
                        );
                        if !decoder_status {
                            continue;
                        }

                        // Queue drained: if demuxing has also finished, tag
                        // this frame as the last one.
                        if audio_packet.len() == 0 {
                            {
                                let read_packet_end = read_packet_end.lock().unwrap();
                                if read_packet_end.end {
                                    // Decoding reached the end of the stream.
                                    decoder_data.end = true;
                                }
                            }
                        }
                        let mut audio_queue = audio_queue.lock().unwrap();
                        audio_queue.push(decoder_data);
                    }
                }
            })
            .expect("audio_decoder_thread");
    }

    /// Audio feed thread: converts decoded audio bytes into i16 samples,
    /// pushes them to the SDL callback queue, and advances the shared audio
    /// clock (setting it to -1.0 and exiting at end of stream).
    pub unsafe fn player_audio(&mut self) {
        let (
            player_status,
            video_packet_queue,
            audio_packet_queue,
            video_queue,
            audio_queue,
            mut video_ctx,
            mut audio_ctx,
            audio_time,
            audio_callback_data,
            read_packet_end,
        ) = self.clone_thread_data();
        Builder::new()
            .name("player_audio".to_string())
            .spawn(move || {
                loop {
                    {
                        // While paused, spin without feeding audio.
                        let player_status = player_status.lock().unwrap();
                        if player_status.status == false {
                            continue;
                        }
                    }
                    let now = Instant::now();
                    {
                        let mut audio_queue_data = audio_queue.lock().unwrap();
                        // println!("audio len{}", audio_queue_data.len());
                        let mut result: Vec<i16> = Vec::new();

                        if audio_queue_data.len() > 0 {
                            let audio = audio_queue_data.pop().unwrap();

                            // Reassemble i16 samples from the raw byte buffer:
                            // byte 2i is the low byte, byte 2i+1 the high byte
                            // (little-endian pairs).
                            for i in 0..(audio_ctx.out_buffer_size / 2) {
                                result.push(
                                    (((*(audio.data[0].as_ptr()).add((i * 2 + 1) as usize))
                                        as i16)
                                        << 8)
                                        | *(audio.data[0].as_ptr()).add((i * 2) as usize) as i16,
                                );
                            }

                            let mut buffer = audio_callback_data.lock().unwrap();
                            buffer.push(result);

                            // note: `sleep` is computed but never used.
                            let sleep = 1.0 / audio_ctx.sample_rate as f64 * 1000000.0;
                            // Audio clock: pts (in sample units) / sample rate.
                            let audio_sec =
                                audio.pts as f64 * 1 as f64 / audio_ctx.sample_rate as f64;

                            let mut audio_time = audio_time.write().unwrap();
                            audio_time.time = audio_sec;
                            if audio.end {
                                // -1.0 signals end-of-stream to clock readers.
                                audio_time.time = -1.0;
                                break;
                            }
                        }
                    }
                    // Sleep roughly one 1024-sample buffer period, minus the
                    // time already spent this iteration.
                    let mut distance = (1024 * 1000000 / audio_ctx.sample_rate / 1000) as i64
                        - now.elapsed().as_millis() as i64;
                    if distance < 0 {
                        distance = 0;
                    }
                    thread::sleep(Duration::from_millis(distance as u64));
                }
            })
            .expect("player_audio");
    }

    /// Pops one decoded frame from the video queue.
    ///
    /// Returns `(got_frame, rgb24_bytes, pts_seconds, end_of_stream, raw_frame)`.
    /// For non-YUV444P formats the frame is converted to packed RGB24 with
    /// sws_scale at `self.width` x `self.height`; for YUV444P the raw planes
    /// in `raw_frame` are consumed by the OpenGL path instead.
    pub unsafe fn get_video_frame(&mut self) -> (bool, Vec<u8>, f64, bool, DecoderData) {
        let mut decoder_data = DecoderData::new();
        let mut frame_decoder_data: Vec<u8> = Vec::new();

        let mut queue_status = false;
        let mut video_sec = 0.0;
        let mut end = false;
        {
            // Take the next decoded frame, if any is ready.
            let mut video_queue_data = self.video_queue.lock().unwrap();
            if video_queue_data.len() > 0 {
                decoder_data = video_queue_data.pop().unwrap();
                queue_status = true;
            }
        }

        if queue_status {
            // Presentation time in seconds: pts * time_base.
            video_sec = decoder_data.pts as f64 * self.video_ctx.time_base.num as f64
                / self.video_ctx.time_base.den as f64;
            end = decoder_data.end;
            if self.video_ctx.format != ffmpeg_sys_next::AVPixelFormat::AV_PIX_FMT_YUV444P {
                // Wrap the decoded planes in a temporary AVFrame so sws_scale
                // can read them; no AVBufferRefs are attached, the plane
                // memory stays owned by decoder_data.
                let mut frame = ffmpeg_sys_next::av_frame_alloc();
                (*frame).width = self.video_ctx.width;
                (*frame).height = self.video_ctx.height;

                for i in 0..decoder_data.data.len() {
                    (*frame).data[i] = decoder_data.data[i].as_mut_ptr();
                    (*frame).linesize[i] = decoder_data.line_size[i];
                }

                // NOTE(review): reserve_exact only sets capacity — the Vec's
                // length stays 0 even though sws_scale writes through the raw
                // pointer below, so callers using as_slice() see an empty
                // slice; verify the consumer. (Capacity is also w*h*4 while
                // packed RGB24 needs w*h*3.)
                frame_decoder_data.reserve_exact(((*frame).width * (*frame).height * 4) as usize);
                // Lazily create the scaler the first time it is needed and
                // cache it on the video context.
                if (self.video_ctx).sws_scale_ctx.is_null() {
                    (self.video_ctx).sws_scale_ctx = ffmpeg_sys_next::sws_getContext(
                        (*frame).width,
                        (*frame).height,
                        decoder_data.format,
                        self.width,
                        self.height,
                        ffmpeg_sys_next::AVPixelFormat::AV_PIX_FMT_RGB24,
                        ffmpeg_sys_next::SWS_BILINEAR,
                        std::ptr::null_mut(),
                        std::ptr::null_mut(),
                        std::ptr::null_mut(),
                    );
                }

                assert!(
                    !(self.video_ctx).sws_scale_ctx.is_null(),
                    "Couldn't initialize sw scaler"
                );

                // Single packed destination plane, pitch = width * 3 bytes.
                let dest = [
                    frame_decoder_data.as_mut_ptr(),
                    std::ptr::null_mut(),
                    std::ptr::null_mut(),
                    std::ptr::null_mut(),
                ];
                let dest_linesize = [self.width * 3, 0, 0, 0];
                ffmpeg_sys_next::sws_scale(
                    (self.video_ctx).sws_scale_ctx,
                    (*frame).data.as_ptr() as *const *const u8,
                    (*frame).linesize.as_ptr(),
                    0,
                    (*frame).height,
                    dest.as_ptr(),
                    dest_linesize.as_ptr(),
                );

                // Frees the AVFrame struct itself; the plane buffers set
                // above remain owned by decoder_data.
                ffmpeg_sys_next::av_frame_free(&mut frame);
            }
        }
        return (
            queue_status,
            frame_decoder_data,
            video_sec,
            end,
            decoder_data,
        );
    }
}
