use std::{collections::VecDeque, ptr::null_mut, sync::Arc, time::Instant};

use ffmpeg_sys_next::{
    av_frame_alloc, av_frame_free, av_packet_alloc, av_packet_free, av_packet_unref, av_q2d,
    av_samples_get_buffer_size, avcodec_alloc_context3, avcodec_open2,
    avcodec_parameters_to_context, avcodec_receive_frame, avcodec_send_packet, swr_alloc,
    swr_alloc_set_opts2, swr_convert, swr_init, AVChannelLayout, AVCodecParameters, AVFrame,
    AVPacket, AVERROR, AVERROR_EOF, EAGAIN,
};
use sdl2::{
    audio::{AudioCallback, AudioDevice, AudioSpecDesired},
    Sdl,
};
use tokio::{
    sync::{
        mpsc::{self, Receiver, Sender},
        Mutex, RwLock,
    },
    time::{sleep, Duration},
};

use crate::{
    CustomError, DecoderData, Packet, PlayStatus, FRAME_QUEUE_MAX_SIZE, MAX_AUDIO_FRAME_SIZE,
    MAX_STOP_SLEEP_TIME, PACKET_QUEUE_MAX_SIZE,
};

/// Shared PCM sample sink bridging the async playback loop and the SDL audio
/// callback thread. Despite the name, this does not generate a square wave —
/// the name presumably survives from the SDL example it was adapted from
/// (TODO confirm); it simply drains queued decoded chunks.
#[derive(Debug, Clone)]
pub struct SquareWave {
    // FIFO of ready-to-play i16 sample chunks: `audio_player` pushes,
    // the SDL callback pops. std Mutex (not tokio) because the SDL audio
    // thread is not async.
    pub data: Arc<std::sync::Mutex<VecDeque<Vec<i16>>>>,
}

impl AudioCallback for SquareWave {
    type Channel = i16;

    /// SDL audio callback: pops the next decoded chunk and copies it into
    /// the device buffer.
    ///
    /// Fixes over the previous version:
    /// - when the queue is empty, the buffer is now filled with silence
    ///   instead of being left untouched (which replayed stale samples);
    /// - the copy is length-clamped, so a chunk whose size differs from the
    ///   device buffer no longer panics in `copy_from_slice`, and any
    ///   uncovered tail is zeroed.
    fn callback(&mut self, out: &mut [i16]) {
        let mut buffer = self.data.lock().unwrap();
        match buffer.pop_front() {
            Some(data) => {
                let n = data.len().min(out.len());
                out[..n].copy_from_slice(&data[..n]);
                // Zero whatever the chunk did not cover.
                for sample in &mut out[n..] {
                    *sample = 0;
                }
            }
            // No data ready: output silence rather than stale samples.
            None => {
                for sample in out.iter_mut() {
                    *sample = 0;
                }
            }
        }
    }
}

/// Audio half of the player: owns the FFmpeg decode/resample state, the
/// bounded packet/frame queues, the SDL output device and the shared audio
/// clock used for A/V sync.
///
/// Holds raw FFmpeg pointers, so an instance is only meaningful while the
/// demuxer/codec objects set up elsewhere are alive.
#[derive(Clone)]
pub struct AudioPlayer {
    // Index of the audio stream in the container; -1 = no audio stream.
    pub audio_index: isize,
    // Codec parameters of the selected stream (set externally; starts null).
    pub audio_codec_parameters: *mut AVCodecParameters,
    // Demuxed packets flow demuxer -> decode_packet through this queue.
    pub packet_queue_sender: Sender<Packet>,
    pub packet_queue_receiver: Arc<Mutex<Receiver<Packet>>>,
    // Decoded/resampled frames flow decode_packet -> audio_player.
    pub frame_queue_sender: Sender<DecoderData>,
    pub frame_queue_receiver: Arc<Mutex<Receiver<DecoderData>>>,
    // Source stream properties (defaults; presumably overwritten from the
    // real stream during setup — TODO confirm against the caller).
    pub channels: i32,
    pub sample_rate: i32,
    pub sample_fmt: ffmpeg_sys_next::AVSampleFormat,
    // Output (playback) parameters; layout pointer starts null in `new`.
    pub out_channel_layout: *const AVChannelLayout,
    pub out_nb_samples: i32,
    pub out_sample_fmt: ffmpeg_sys_next::AVSampleFormat,
    pub out_sample_rate: i32,
    // Stream time base, used to turn frame pts into seconds.
    pub time_base: ffmpeg_sys_next::AVRational,
    // Byte size of one resampled output buffer (see get_samples_buffer_size).
    pub out_buffer_size: i32,
    pub audio_codec: *mut ffmpeg_sys_next::AVCodec,
    pub audio_codec_context: *mut ffmpeg_sys_next::AVCodecContext,
    pub swr_context: *mut ffmpeg_sys_next::SwrContext,
    // SDL playback device, None until init_audio_sdl runs.
    pub audio_device: Arc<Option<AudioDevice<SquareWave>>>,
    // Buffer shared with the SDL callback (see `SquareWave`).
    pub audio_callback_data: Arc<std::sync::Mutex<VecDeque<Vec<i16>>>>,
    // Current audio position in seconds; -1.0 signals end of playback.
    pub audio_time: Arc<RwLock<f64>>,
    // Shared playback state machine (Pause / PacketEnd / End / ...).
    pub status: Arc<Mutex<PlayStatus>>,
}

// SAFETY(review): asserted so the player can be moved across tokio tasks.
// The struct contains raw FFmpeg pointers and an SDL `AudioDevice`; this is
// only sound if each of those is accessed from one task at a time — the
// visible code never shares the raw pointers concurrently, but confirm no
// caller clones and uses them from parallel tasks.
unsafe impl Send for AudioPlayer {}

impl AudioPlayer {
    pub fn new(status: Arc<Mutex<PlayStatus>>, audio_time: Arc<RwLock<f64>>) -> Self {
        let (frame_queue_sender, frame_queue_receiver): (
            Sender<DecoderData>,
            Receiver<DecoderData>,
        ) = mpsc::channel(FRAME_QUEUE_MAX_SIZE);
        let (packet_queue_sender, packet_queue_receiver): (Sender<Packet>, Receiver<Packet>) =
            mpsc::channel(PACKET_QUEUE_MAX_SIZE);
        AudioPlayer {
            audio_index: -1,
            audio_codec_parameters: null_mut(),
            packet_queue_sender,
            packet_queue_receiver: Arc::new(Mutex::new(packet_queue_receiver)),
            frame_queue_sender,
            frame_queue_receiver: Arc::new(Mutex::new(frame_queue_receiver)),
            channels: 2,
            sample_rate: 44100,
            out_channel_layout: null_mut(),
            out_nb_samples: 1024,
            sample_fmt: ffmpeg_sys_next::AVSampleFormat::AV_SAMPLE_FMT_S16,
            out_sample_fmt: ffmpeg_sys_next::AVSampleFormat::AV_SAMPLE_FMT_S16,
            out_sample_rate: 44100,
            time_base: ffmpeg_sys_next::AVRational { num: 0, den: 0 },
            out_buffer_size: 0,
            audio_codec: null_mut(),
            audio_codec_context: null_mut(),
            swr_context: null_mut(),
            audio_device: Arc::new(None),
            audio_callback_data: Arc::new(std::sync::Mutex::new(VecDeque::new())),
            audio_time,
            status,
        }
    }

    /// Allocates an `AVCodecContext` for the selected audio codec, used to
    /// hold frames extracted from the stream.
    ///
    /// No-op when no audio stream was found (`audio_index == -1`).
    /// Returns an error when FFmpeg cannot allocate the context.
    pub async fn codec_alloc_context(&mut self) -> Result<(), CustomError> {
        if self.audio_index == -1 {
            return Ok(());
        }
        let ctx = unsafe { avcodec_alloc_context3(self.audio_codec) };
        if ctx.is_null() {
            Err(CustomError::FfmepgError("Unsupported codec".to_owned()))
        } else {
            self.audio_codec_context = ctx;
            Ok(())
        }
    }

    /// Copies the stream's codec parameters into the codec context.
    ///
    /// No-op when no audio stream was found (`audio_index == -1`).
    pub async fn parameters_to_context(&mut self) -> Result<(), CustomError> {
        if self.audio_index == -1 {
            return Ok(());
        }
        let ret = unsafe {
            avcodec_parameters_to_context(self.audio_codec_context, self.audio_codec_parameters)
        };
        match ret {
            0 => Ok(()),
            _ => Err(CustomError::FfmepgError(
                "Couldn't copy codec context".to_owned(),
            )),
        }
    }

    /// Opens the decoder and records its native sample format (needed later
    /// for resampling).
    ///
    /// No-op when no audio stream was found (`audio_index == -1`).
    pub async fn open_codec(&mut self) -> Result<(), CustomError> {
        if self.audio_index == -1 {
            return Ok(());
        }
        let ret = unsafe { avcodec_open2(self.audio_codec_context, self.audio_codec, null_mut()) };
        if ret != 0 {
            return Err(CustomError::FfmepgError("Could not open codec.".to_owned()));
        }
        // Remember the decoder's output format so swr_context can convert
        // from it to the playback format.
        self.sample_fmt = unsafe { (*self.audio_codec_context).sample_fmt };
        Ok(())
    }

    pub async fn swr_context(&mut self) -> Result<(), CustomError> {
        if self.audio_index == -1 {
            return Ok(());
        }
        self.swr_context = unsafe { swr_alloc() };
        unsafe {
            swr_alloc_set_opts2(
                &mut self.swr_context,
                self.out_channel_layout,
                self.out_sample_fmt,
                self.out_sample_rate,
                self.out_channel_layout,
                self.sample_fmt,
                self.sample_rate,
                0,
                null_mut(),
            )
        }; //重采样结构体赋值
        unsafe {
            swr_init(self.swr_context); //将重采样结构体参数加载
        }
        Ok(())
    }

    pub async fn get_samples_buffer_size(&mut self) -> Result<(), CustomError> {
        if self.audio_index == -1 {
            return Ok(());
        }
        self.out_buffer_size = unsafe {
            av_samples_get_buffer_size(
                null_mut(),
                (*self.out_channel_layout).nb_channels,
                self.out_nb_samples,
                self.out_sample_fmt,
                1,
            )
        };
        Ok(())
    }

    pub async fn decode_packet(&mut self, has_video: bool) -> Result<(), CustomError> {
        loop {
            {
                let status = self.status.lock().await;
                if let PlayStatus::Pause = *status {
                    sleep(Duration::from_millis(MAX_STOP_SLEEP_TIME)).await;
                    continue;
                };
            }

            let mut pkt = {
                let mut packet_queue_receiver_lock = self.packet_queue_receiver.lock().await;
                match packet_queue_receiver_lock.try_recv() {
                    Ok(packet) => packet,
                    Err(_) => {
                        let mut status_lock = self.status.lock().await;
                        match *status_lock {
                            PlayStatus::PacketEnd => {
                                if has_video {
                                    *status_lock = PlayStatus::AudioFrameEnd;
                                } else {
                                    *status_lock = PlayStatus::End;
                                }
                                break;
                            }
                            PlayStatus::VideoFrameEnd => {
                                *status_lock = PlayStatus::End;
                                break;
                            }
                            _ => {
                                continue;
                            }
                        };
                    }
                }
            };

            let packet: *mut AVPacket = unsafe { av_packet_alloc() };
            unsafe {
                pkt.to_av_packet(packet);
            }

            if unsafe { avcodec_send_packet(self.audio_codec_context, packet) } < 0 {
                unsafe {
                    av_packet_unref(packet);
                }
                println!("Failed to decode packet.");
                return Err(CustomError::FfmepgError(
                    "Failed to decode packet".to_owned(),
                ));
            }
            unsafe {
                av_packet_unref(packet);
            }

            loop {
                let mut decoder_data = DecoderData::default();
                {
                    let mut frame = unsafe { av_frame_alloc() };
                    let res = unsafe { avcodec_receive_frame(self.audio_codec_context, frame) };
                    if res == 0 {
                        let size = (self.out_buffer_size * 2) as usize;
                        decoder_data.data.reserve_exact(size);
                        let dest = [decoder_data.data.as_mut_ptr()];
                        self.swr_convert(frame, dest)?;

                        decoder_data.pts = unsafe { (*frame).pts };
                        decoder_data.time_base = self.time_base;

                        unsafe {
                            av_frame_free(&mut frame);
                        }
                    } else if res == AVERROR_EOF || res == AVERROR(EAGAIN) {
                        unsafe {
                            av_frame_free(&mut frame);
                        }
                        break;
                    } else {
                        unsafe {
                            av_frame_free(&mut frame);
                        }
                        //println!("Failed decode, ");
                        return Err(CustomError::FfmepgError("Failed decode".to_lowercase()));
                    }
                }

                let _ = self.frame_queue_sender.send(decoder_data).await;
            }
        }
        Ok(())
    }

    /// Resamples one decoded frame into the output format via FFmpeg's
    /// `swr_convert`, writing the converted samples into `dest[0]`.
    ///
    /// # Errors
    /// Returns an error when FFmpeg reports a conversion failure
    /// (negative return value).
    pub fn swr_convert(&self, frame: *mut AVFrame, dest: [*mut u8; 1]) -> Result<(), CustomError> {
        let converted = unsafe {
            swr_convert(
                self.swr_context,
                dest.as_ptr() as *mut *mut u8,
                MAX_AUDIO_FRAME_SIZE,
                (*frame).data.as_ptr() as *mut *const u8,
                (*frame).nb_samples,
            )
        };
        if converted >= 0 {
            Ok(())
        } else {
            Err(CustomError::FfmepgError("音频转码失败.".to_owned()))
        }
    }

    pub async fn init_audio_sdl(&mut self, sdl_context: &mut Sdl) -> Result<(), CustomError> {
        if self.audio_index != -1 {
            let desired_spec = AudioSpecDesired {
                freq: Some(self.out_sample_rate),
                channels: Some(self.channels as u8),
                samples: Some(1024),
            };

            let audio_subsystem = sdl_context.audio().unwrap();

            let device = audio_subsystem
                .open_playback(None, &desired_spec, |_spec| {
                    // initialize the audio callback
                    SquareWave {
                        data: self.audio_callback_data.clone(),
                    }
                })
                .unwrap();
            self.audio_device = Arc::new(Some(device));
        }
        Ok(())
    }

    /// Playback loop: pulls resampled frames from the frame queue, repacks
    /// the raw bytes as i16 samples, hands chunks to the SDL callback queue,
    /// and keeps the shared audio clock (`audio_time`) updated for A/V sync.
    ///
    /// Returns an error when there is no audio stream; otherwise runs until
    /// the shared status reaches `PlayStatus::End`.
    pub async fn audio_player(&mut self) -> Result<(), CustomError> {
        if -1 == self.audio_index {
            return Err(CustomError::FfmepgError(String::from("no audio data")));
        }
        match self.audio_device.as_ref() {
            Some(device) => {
                device.resume();
            }
            None => {
                println!("Audio resume failed");
            }
        };
        // Tracks whether the SDL device is currently playing, so pause/resume
        // is only issued on state transitions.
        let mut audio_status = true;
        loop {
            let now = Instant::now();
            {
                let status_lock = self.status.lock().await;
                //println!("audio status: {:?}", status_lock);
                match *status_lock {
                    PlayStatus::Pause => {
                        // Transition playing -> paused: stop the device once.
                        if audio_status {
                            match self.audio_device.as_ref() {
                                Some(device) => {
                                    //println!("Audio pause");
                                    device.pause();
                                }
                                None => {
                                    println!("Audio pause failed");
                                }
                            };
                            audio_status = false;
                        }
                    }
                    PlayStatus::End => {
                        break;
                    }
                    _ => {
                        // Any other state counts as "playing": resume once if
                        // we were paused.
                        if !audio_status {
                            match self.audio_device.as_ref() {
                                Some(device) => {
                                    //println!("Audio resume");
                                    device.resume();
                                }
                                None => {
                                    println!("Audio resume failed");
                                }
                            };
                            audio_status = true;
                        }
                    }
                };
            }
            // While paused, idle without consuming frames.
            if !audio_status {
                sleep(Duration::from_millis(MAX_STOP_SLEEP_TIME)).await;
                continue;
            };

            //println!("audio player {:?}", audio_status);

            {
                let mut frame_queue_receiver_lock = self.frame_queue_receiver.lock().await;
                let audio_data = match frame_queue_receiver_lock.recv().await {
                    Some(data) => data,
                    None => continue,
                };
                //println!("audio get");

                // Reassemble little-endian byte pairs into i16 samples.
                // NOTE(review): this reads `audio_data.data` through raw
                // pointers up to out_buffer_size bytes, past the Vec's
                // logical length (decode_packet writes into spare capacity
                // without set_len) — confirm capacity always covers
                // out_buffer_size.
                let mut result: Vec<i16> = Vec::new();
                unsafe {
                    for i in 0..(self.out_buffer_size / 2) {
                        result.push(
                            (((*(audio_data.data.as_ptr()).add((i * 2 + 1) as usize)) as i16) << 8)
                                | *(audio_data.data.as_ptr()).add((i * 2) as usize) as i16,
                        );
                    }
                }

                {
                    // Back-pressure: let the SDL callback drain before
                    // queueing more than one pending chunk.
                    while self.audio_callback_data.lock().unwrap().len() > 1 {
                        sleep(Duration::from_millis(1)).await;
                    }
                    self.audio_callback_data.lock().unwrap().push_back(result);
                }

                // Advance the shared audio clock: pts * time_base = seconds.
                let audio_sec = unsafe { audio_data.pts as f64 * av_q2d(audio_data.time_base) };
                let mut audio_time = self.audio_time.write().await;
                (*audio_time) = audio_sec;
                //println!("put audio time {:?}", audio_sec);
                {
                    let status_lock = self.status.lock().await;
                    if let PlayStatus::End = *status_lock {
                        // -1.0 signals consumers that playback is over.
                        (*audio_time) = -1.0;
                        break;
                    }
                }
            }

            // Pace the loop to roughly one 1024-sample chunk duration,
            // minus elapsed work and a small 3 ms safety margin.
            let distance = (1024 * 1000000 / self.sample_rate / 1000) as i64
                - now.elapsed().as_millis() as i64
                - 3;
            if distance > 0 {
                sleep(Duration::from_millis(distance as u64)).await;
            }
        }
        Ok(())
    }

    // pub fn audio_check(&mut self) -> Result<(), CustomError> {
    //     {
    //         let audio_callback_data = self.audio_callback_data.lock().unwrap();
    //         if audio_callback_data.len() == 0 {
    //             match self.audio_device.as_ref() {
    //                 Some(device) => {
    //                     device.pause();
    //                 }
    //                 None => {
    //                     println!("Audio pause failed");
    //                 }
    //             }
    //         } else {
    //             match self.audio_device.as_ref() {
    //                 Some(device) => {
    //                     device.resume();
    //                 }
    //                 None => {
    //                     println!("Audio resume failed");
    //                 }
    //             }
    //         }
    //     }
    //     Ok(())
    // }
}
