use std::{
    ffi::CString,
    io::{stdout, Write},
    ptr::null_mut,
    sync::Arc,
};

use crate::{AudioPlayer, CustomError, Packet, PlayStatus, VideoPlayer, MAX_STOP_SLEEP_TIME};
use crossterm::{cursor, execute, terminal};
use ffmpeg_sys_next::{
    av_dump_format, av_packet_alloc, av_packet_free, av_packet_unref, av_q2d,
    av_read_frame, avcodec_close, avcodec_find_decoder, avcodec_flush_buffers,
    avformat_alloc_context, avformat_close_input, avformat_find_stream_info, avformat_open_input,
    avformat_seek_file, swr_free, AVCodec, AVCodecParameters, AVFormatContext, AVMediaType,
    AVStream, AVSEEK_FLAG_BACKWARD, AV_TIME_BASE,
};
use sdl2::Sdl;
use tokio::{
    sync::{
        mpsc::{channel, Receiver, Sender},
        Mutex, RwLock,
    },
    time::{sleep, Duration},
};

#[derive(Clone)]
pub struct MediaPlayer {
    /// Path to the media file, kept as a C string for the FFmpeg FFI calls.
    pub video_path: CString,
    /// Video decoding/presentation half of the player.
    pub video_player: VideoPlayer,
    /// Audio decoding/output half of the player.
    pub audio_player: AudioPlayer,
    // Playback status flag, shared between the reader/decoder/player tasks.
    pub play_status: Arc<Mutex<PlayStatus>>,
    // Demuxer context holding the container format info and stream parameters.
    pub av_format_context: *mut AVFormatContext,
    /// SDL context used for window and audio device setup.
    pub sdl_context: Sdl,
    /// Shared playback clock driven by the audio side (read as seconds).
    pub audio_time: Arc<RwLock<f64>>,
    /// Total duration in seconds, filled in by `find_stream_info`.
    pub duration: i64,
    /// Receives signed seek increments (seconds) sent by the video UI.
    pub seek_receiver: Arc<Mutex<Receiver<i8>>>,
}

// SAFETY: MediaPlayer holds a raw `*mut AVFormatContext`, which makes it
// !Send/!Sync by default. These impls assert that sharing it across tasks is
// sound. NOTE(review): this is only valid if the raw FFmpeg pointers are never
// mutated concurrently without external synchronization — TODO confirm.
unsafe impl Send for MediaPlayer {}
unsafe impl Sync for MediaPlayer {}

impl MediaPlayer {
    /// Builds a player for `video_path` with a window titled `title`.
    ///
    /// Wires up the shared state: the play-status flag, the audio clock, and
    /// the seek channel whose sender goes to the video UI and whose receiver
    /// is serviced by [`Self::seek`]. The format context starts out null and
    /// is allocated later by [`Self::init`].
    pub fn new(video_path: CString, title: String) -> Self {
        let (seek_sender, seek_receiver): (Sender<i8>, Receiver<i8>) = channel::<i8>(2);
        let sdl_context = sdl2::init().unwrap();
        let play_status = Arc::new(Mutex::new(PlayStatus::Start));
        let audio_time = Arc::new(RwLock::new(0.0));
        let video_player =
            VideoPlayer::new(play_status.clone(), title, sdl_context.clone(), seek_sender);
        let audio_player = AudioPlayer::new(play_status.clone(), audio_time.clone());
        MediaPlayer {
            video_path,
            video_player,
            audio_player,
            play_status,
            av_format_context: null_mut(),
            sdl_context,
            audio_time,
            duration: 0,
            seek_receiver: Arc::new(Mutex::new(seek_receiver)),
        }
    }

    /// Allocates the `AVFormatContext` that all later demuxing calls use.
    ///
    /// Fails with `CustomError::FfmepgError` if FFmpeg cannot allocate it.
    pub async fn init(&mut self) -> Result<(), CustomError> {
        let format_context = unsafe { avformat_alloc_context() };
        if format_context.is_null() {
            println!("ERROR could not allocate memory for Format Context");
            return Err(CustomError::FfmepgError(
                "ERROR could not allocate memory for Format Context".to_owned(),
            ));
        }
        self.av_format_context = format_context;
        Ok(())
    }

    /**
     * Opens the media file and reads its header, storing the container
     * format information and stream parameters in `av_format_context`.
     */
    pub async fn format_open_input(&mut self) -> Result<(), CustomError> {
        // `init` must have allocated the context first.
        if self.av_format_context.is_null() {
            return Err(CustomError::FfmepgError(
                "avformat_alloc_context alloc failed".to_owned(),
            ));
        }

        // avformat_open_input returns 0 on success.
        let open_result = unsafe {
            avformat_open_input(
                &mut self.av_format_context,
                self.video_path.as_ptr(),
                null_mut(),
                null_mut(),
            )
        };
        if open_result != 0 {
            println!("Couldn't open file. path: {:?}", self.video_path);
            return Err(CustomError::FfmepgError("Couldn't open file".to_owned()));
        }

        Ok(())
    }

    /**
     * Probes the file's streams and fills `av_format_context->streams`.
     *
     * Fix: FFmpeg's `avformat_find_stream_info` returns `>= 0` on success and
     * a negative AVERROR on failure, so only negative values are treated as
     * errors (the previous `!= 0` check rejected positive success codes).
     */
    pub async fn find_stream_info(&mut self) -> Result<(), CustomError> {
        if unsafe { avformat_find_stream_info(self.av_format_context, null_mut()) } < 0 {
            println!("Couldn't find stream information");
            return Err(CustomError::FfmepgError(
                "Couldn't find stream information".to_owned(),
            ));
        };
        // Total duration in seconds (AVFormatContext.duration is expressed in
        // AV_TIME_BASE units). NOTE(review): duration can be AV_NOPTS_VALUE
        // when unknown, yielding a negative value — TODO confirm callers cope.
        self.duration = unsafe { (*self.av_format_context).duration / AV_TIME_BASE as i64 };
        Ok(())
    }

    /// Debug aid: has FFmpeg log the container/stream details for this input
    /// (`is_output = 0` marks the context as an input).
    pub async fn dump_format(&self) -> Result<(), CustomError> {
        unsafe {
            av_dump_format(self.av_format_context, 0, self.video_path.as_ptr(), 0);
        };
        Ok(())
    }

    /**
     * Iterates over every stream in the container and records the decoder
     * parameters of the first video stream and the first audio stream.
     *
     * Fix: the video branch now carries the same `-1` first-stream guard as
     * the audio branch; previously a later video stream could silently
     * overwrite the already-selected video parameters.
     *
     * Errors if a stream's codec is unsupported or if the file contains
     * neither audio nor video.
     */
    pub async fn nb_streams(&mut self) -> Result<(), CustomError> {
        let nb_streams = unsafe { (*self.av_format_context).nb_streams as isize };
        // Walk all streams contained in the file (video, audio, subtitles, ...).
        for index in 0..nb_streams {
            let av_stream: *mut *mut AVStream =
                unsafe { (*self.av_format_context).streams.offset(index) };
            let codec_parameters: *mut AVCodecParameters = unsafe { (**av_stream).codecpar };
            // Look up the decoder matching this stream's codec id.
            let av_codec = unsafe { avcodec_find_decoder((*codec_parameters).codec_id) };
            if av_codec.is_null() {
                return Err(CustomError::FfmepgError("Unsupported codec".to_owned()));
            }

            let codec_type = unsafe { (*codec_parameters).codec_type };
            if codec_type == AVMediaType::AVMEDIA_TYPE_VIDEO
                && -1 == self.video_player.video_index
            {
                // First video stream found in the file.
                self.video_player.original_width = unsafe { (*codec_parameters).width };
                self.video_player.original_height = unsafe { (*codec_parameters).height };
                self.video_player.video_index = index;
                self.video_player.video_codec_parameters = codec_parameters;
                self.video_player.time_base = unsafe { (**av_stream).time_base };
                self.video_player.video_codec = av_codec as *mut AVCodec;
                self.video_player.fps = unsafe { av_q2d((**av_stream).avg_frame_rate) as i32 };
            } else if codec_type == AVMediaType::AVMEDIA_TYPE_AUDIO
                && -1 == self.audio_player.audio_index
            {
                // First audio stream found in the file.
                self.audio_player.audio_index = index;
                self.audio_player.audio_codec_parameters = codec_parameters;
                self.audio_player.audio_codec = av_codec as *mut AVCodec;
                self.audio_player.time_base = unsafe { (**av_stream).time_base };
                self.audio_player.channels = unsafe { (*codec_parameters).ch_layout.nb_channels };
                self.audio_player.sample_rate = unsafe { (*codec_parameters).sample_rate };
                self.audio_player.out_sample_rate = unsafe { (*codec_parameters).sample_rate };
                self.audio_player.out_channel_layout = unsafe { &(*codec_parameters).ch_layout };
            }
        }
        if self.video_player.video_index == -1 && self.audio_player.audio_index == -1 {
            return Err(CustomError::FfmepgError("读取的文件无音视频流".to_owned()));
        }

        Ok(())
    }

    /// Spawns the demuxing loop ([`Self::loop_read`]) on a detached
    /// background task and returns immediately; its result is discarded.
    pub async fn read_packet(&self) -> Result<(), CustomError> {
        let mut reader = self.clone();
        tokio::spawn(async move {
            let _ = reader.loop_read().await;
        });
        Ok(())
    }

    pub async fn loop_read(&mut self) -> Result<(), CustomError> {
        loop {
            {
                let status = self.play_status.lock().await;
                if let PlayStatus::Pause = *status {
                    sleep(Duration::from_millis(MAX_STOP_SLEEP_TIME)).await;
                    continue;
                }
            }

            let mut pkt = Packet::default();
            let packet = unsafe { av_packet_alloc() };
            {
                //从文件中依次读取每个图像编码数据包，并存储在AVPacket数据结构中
                if unsafe { av_read_frame(self.av_format_context, packet) } >= 0 {
                    if unsafe { (*packet).stream_index } == self.video_player.video_index as i32
                        || unsafe { (*packet).stream_index } == self.audio_player.audio_index as i32
                    {
                        unsafe {
                            pkt.copy(packet);
                        }
                    } else {
                        unsafe {
                            av_packet_unref(packet);
                        }
                        //字幕等其他信息暂时不做处理
                        continue;
                    }
                } else {
                    unsafe {
                        av_packet_unref(packet);
                    }
                    {
                        let mut play_status_lock = self.play_status.lock().await;
                        *play_status_lock = PlayStatus::PacketEnd;
                        //println!("read frame end. ");
                    }
                    break;
                }
                //println!("1111111111111111111111111111111");
                unsafe {
                    av_packet_unref(packet);
                }
            }

            if pkt.stream_index == self.video_player.video_index as i32 {
                {
                    let _ = self
                        .video_player
                        .packet_queue_sender
                        .send(pkt.clone())
                        .await;
                    //println!("packet video put");
                }
            } else if pkt.stream_index == self.audio_player.audio_index as i32 {
                {
                    let _ = self
                        .audio_player
                        .packet_queue_sender
                        .send(pkt.clone())
                        .await;
                    //println!("packet audio put");
                }
            }
        }
        Ok(())
    }

    /// Spawns detached decoder tasks for whichever of the video and audio
    /// streams are present; each task drains its packet queue.
    ///
    /// The redundant `match { Ok(_) => Ok(()), Err(e) => Err(e) }` re-wrap
    /// was removed — the task's own `Result` flows to the (detached)
    /// `JoinHandle` unchanged.
    pub async fn decode_packet(&mut self) -> Result<(), CustomError> {
        let has_audio = self.audio_player.audio_index != -1;
        let has_video = self.video_player.video_index != -1;
        if has_video {
            let mut video_player_clone = self.video_player.clone();
            tokio::spawn(async move { video_player_clone.decode_packet(has_audio).await });
        }
        if has_audio {
            let mut audio_player_clone = self.audio_player.clone();
            tokio::spawn(async move { audio_player_clone.decode_packet(has_video).await });
        }
        Ok(())
    }

    /// Runs the video presentation loop on the current task (returns when it
    /// finishes), handing it the shared audio clock for A/V sync.
    pub async fn video_player(&mut self) -> Result<(), CustomError> {
        self.video_player
            .video_player(self.audio_time.clone())
            .await?;
        Ok(())
    }

    /// Starts audio playback on a background task.
    ///
    /// For audio-only files (no video stream) this awaits the audio task so
    /// the process doesn't exit early; otherwise the video loop blocks the
    /// caller. The redundant `Ok`/`Err` re-wrap inside the spawned future was
    /// removed — its `Result` reaches the `JoinHandle` unchanged.
    pub async fn audio_player(&self) -> Result<(), CustomError> {
        if self.audio_player.audio_index == -1 {
            // No audio stream: nothing to play.
            return Ok(());
        }
        let mut audio_player_clone = self.audio_player.clone();
        let audio_player_handle =
            tokio::spawn(async move { audio_player_clone.audio_player().await });
        if self.video_player.video_index == -1 {
            let _ = audio_player_handle.await;
        }
        Ok(())
    }

    /// Releases FFmpeg resources: the audio resampler, both codec contexts,
    /// and the format context. Intended to run once, after playback ends.
    ///
    /// NOTE(review): `avcodec_close` is deprecated in recent FFmpeg in favor
    /// of `avcodec_free_context` (which also frees the struct) — TODO confirm
    /// the codec contexts are freed elsewhere or migrate.
    pub async fn close(&mut self) -> Result<(), CustomError> {
        unsafe {
            //sws_freeContext(self.video_player.sws_context);
            swr_free(&mut self.audio_player.swr_context);
            avcodec_close(self.video_player.video_codec_context);
            avcodec_close(self.audio_player.audio_codec_context);
            avformat_close_input(&mut self.av_format_context);
        }
        Ok(())
    }

    /// Spawns a detached task that repaints a one-line progress display
    /// (total / played / remaining time) once per second, reading the shared
    /// audio clock for the current position.
    ///
    /// Improvements: the repeated h/m/s arithmetic is factored into a helper,
    /// and the remaining time is clamped at zero so a play clock that drifts
    /// slightly past `duration` (rounding) never prints a negative time.
    pub async fn real_time(&self) -> Result<(), CustomError> {
        // Split a second count into (hours, minutes, seconds).
        fn hms(total_secs: i64) -> (i64, i64, i64) {
            (total_secs / 3600, (total_secs % 3600) / 60, total_secs % 60)
        }

        let duration = self.duration;
        let audio_time = self.audio_time.clone();
        tokio::spawn(async move {
            let (hours, minutes, seconds) = hms(duration);
            execute!(stdout(), terminal::Clear(terminal::ClearType::All)).unwrap();
            loop {
                // Snapshot the audio clock, holding the read lock briefly.
                let play_time = { *audio_time.read().await as i64 };
                let (play_hours, play_minutes, play_seconds) = hms(play_time);

                let remain_time = (duration - play_time).max(0);
                let (remain_hours, remain_minutes, remain_seconds) = hms(remain_time);

                execute!(stdout(), cursor::MoveTo(0, 1)).unwrap();
                println!("total time: {:02}:{:02}:{:02}, play time: {:02}:{:02}:{:02}, remaining time: {:02}:{:02}:{:02}", hours, minutes, seconds, play_hours, play_minutes, play_seconds, remain_hours, remain_minutes, remain_seconds);
                stdout().flush().unwrap();
                sleep(Duration::from_secs(1)).await;
            }
        });
        Ok(())
    }

    /// Spawns a detached task that services seek requests arriving on
    /// `seek_receiver` (signed increments in whole seconds).
    ///
    /// Fixes (ffplay semantics for `avformat_seek_file`):
    /// - `seek_rel` is now the signed relative offset converted to
    ///   AV_TIME_BASE units. The previous code used the absolute position in
    ///   *seconds*, mixing units so `min_ts` landed almost on `seek_pos` and
    ///   the window conditions keyed on the (almost always positive) playback
    ///   clock instead of the seek direction.
    /// - A closed seek channel now ends the task; `recv()` on a closed
    ///   channel returns `None` immediately, and the old `continue` busy-looped.
    pub async fn seek(&self) -> Result<(), CustomError> {
        let player_clone = self.clone();
        tokio::spawn(async move {
            loop {
                // Wait for the next seek increment (seconds, signed).
                let incr = {
                    let mut seek_receiver_lock = player_clone.seek_receiver.lock().await;
                    match seek_receiver_lock.recv().await {
                        Some(incr) => incr as i64,
                        // Sender dropped: no more requests can ever arrive.
                        None => break,
                    }
                };

                // Pause playback, remembering the status to restore afterwards.
                let status = match player_clone.stop().await {
                    Ok(status) => status,
                    Err(_) => panic!("get status"),
                };

                // Current playback position in whole seconds.
                let audio_sec = { *player_clone.audio_time.read().await as i64 };

                // Target and window, all in AV_TIME_BASE units.
                let seek_rel = incr * AV_TIME_BASE as i64;
                let seek_pos = (audio_sec + incr) * AV_TIME_BASE as i64;
                let min_ts = if seek_rel > 0 {
                    seek_pos - seek_rel + 2
                } else {
                    i64::MIN
                };
                let max_ts = if seek_rel < 0 {
                    seek_pos - seek_rel - 2
                } else {
                    i64::MAX
                };
                let res = unsafe {
                    avformat_seek_file(
                        player_clone.av_format_context,
                        -1,
                        min_ts,
                        seek_pos,
                        max_ts,
                        AVSEEK_FLAG_BACKWARD,
                    )
                };

                if res >= 0 {
                    // Drop queued packets and reset decoder state so playback
                    // resumes cleanly from the new position.
                    player_clone.receiver_clear().await;
                    unsafe {
                        avcodec_flush_buffers(player_clone.video_player.video_codec_context);
                        avcodec_flush_buffers(player_clone.audio_player.audio_codec_context);
                    }
                }
                // Restore the playback status captured before the seek.
                let _ = player_clone.start(status).await;
            }
        });
        Ok(())
    }

    /// Drains both packet queues so packets demuxed before a seek are never
    /// handed to the decoders after the seek completes.
    pub async fn receiver_clear(&self) {
        // Pull video packets until the queue reports empty/disconnected.
        let mut video_queue = self.video_player.packet_queue_receiver.lock().await;
        while video_queue.try_recv().is_ok() {}

        // Same for the audio queue.
        let mut audio_queue = self.audio_player.packet_queue_receiver.lock().await;
        while audio_queue.try_recv().is_ok() {}
    }

    /// Restores the playback status captured by [`Self::stop`], letting the
    /// reader/decoder loops that poll the flag resume their previous mode.
    pub async fn start(&self, prev_status: PlayStatus) -> Result<(), CustomError> {
        *self.play_status.lock().await = prev_status;
        Ok(())
    }

    /// Pauses playback and returns the status that was active beforehand so
    /// the caller can hand it back to [`Self::start`].
    pub async fn stop(&self) -> Result<PlayStatus, CustomError> {
        let mut guard = self.play_status.lock().await;
        // Swap Pause in and the previous status out in one step.
        let previous = std::mem::replace(&mut *guard, PlayStatus::Pause);
        Ok(previous)
    }

    /// End-to-end playback pipeline. The call order matters: the demuxer must
    /// be opened before codecs are configured, and the background read/decode
    /// tasks must be running before the blocking play loops start.
    pub async fn player(&mut self) -> Result<(), CustomError> {
        // Demuxer: allocate the context, open the file, probe the streams,
        // pick the video/audio streams, and dump the container info.
        self.init().await?;
        self.format_open_input().await?;
        self.find_stream_info().await?;
        self.nb_streams().await?;
        self.dump_format().await?;
        //self.real_time().await?;

        // Video decoder + presentation window.
        self.video_player.codec_alloc_context().await?;
        self.video_player.parameters_to_context().await?;
        self.video_player.open_codec().await?;
        self.video_player.init_window().await?;

        // Audio decoder, resampler, and SDL audio device.
        self.audio_player.codec_alloc_context().await?;
        self.audio_player.parameters_to_context().await?;
        self.audio_player.open_codec().await?;
        self.audio_player.swr_context().await?;
        self.audio_player.get_samples_buffer_size().await?;
        self.audio_player
            .init_audio_sdl(&mut self.sdl_context)
            .await?;

        // Background tasks: demuxing and decoding.
        self.read_packet().await?;
        self.decode_packet().await?;

        // Background task servicing seek requests.
        self.seek().await?;

        // Play loops; video_player() blocks until playback finishes.
        self.audio_player().await?;
        self.video_player().await?;

        // Tear down FFmpeg resources.
        self.close().await?;

        Ok(())
    }
}
