﻿using FFmpeg.AutoGen;
using FFmpegLib;
using Silk.NET.OpenAL;
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Threading.Tasks;

namespace MediaLib.Implement;

/// <summary>
/// Media player that decodes a file with FFmpeg and plays audio through OpenAL
/// while handing decoded YUV420P video frames to <see cref="VideoPlayAction"/>.
/// Workflow: <see cref="Open"/> → <see cref="Play"/>; call <see cref="Dispose()"/> when done.
/// </summary>
public sealed unsafe class SilkMediaPlayer : IDisposable
{
    /// <summary>Callback invoked with each decoded (YUV420P) video frame for rendering.</summary>
    public Action<FAVFrame> VideoPlayAction { get; set; }

    private FAVFormatContext InFmtCtx;
    private FSwsContext SwsCtx;   // video pixel-format converter (→ YUV420P)
    private FSwrContext SwrCtx;   // audio resampler (→ OpenAL-compatible PCM)
    private ConcurrentQueue<PcmFrame> AudioFifo;
    private FrameQueue VideoFifo;

    // OpenAL playback backend
    private SilkOpenALUtils OpenAL;

    private int SampleRate;
    private int ChannelNum;
    private int PerSampleSize;
    private BufferFormat target_fmt;       // OpenAL buffer format; default(0) means "not yet determined"
    private AVSampleFormat out_sample_fmt; // resampler output sample format

    private bool SetPts;  // whether VideoFirstPts has been captured from the first video frame
    private long CurPts;  // current playback position; driven by the audio queue when audio exists

    /// <summary>
    /// An audio stream exists in the opened media.
    /// </summary>
    public bool IsExistAudioStream { get => InFmtCtx?.AudioIndex > -1; }
    /// <summary>
    /// A video stream exists in the opened media.
    /// </summary>
    public bool IsExistVideoStream { get => InFmtCtx?.VideoIndex > -1; }

    // NOTE(review): never set anywhere in this file — presumably set by a collaborator; verify.
    private bool PlayEnd;

    public string VideoFile { get; set; }
    public SourceState PlayStatus { get; set; }
    /// <summary>True while playing. NOTE: name is a historical typo of "Play", kept for caller compatibility.</summary>
    public bool Paly { get => PlayStatus == SourceState.Playing; }

    public SilkMediaPlayer(string file = null)
    {
        VideoFile = file;
        InFmtCtx = new FAVFormatContext();
        PlayStatus = SourceState.Initial;
        VideoFifo = new FrameQueue();
        AudioFifo = new ConcurrentQueue<PcmFrame>();
    }

    /// <summary>
    /// Opens <see cref="VideoFile"/> and initializes the OpenAL backend.
    /// </summary>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    public void Open()
    {
        if (!File.Exists(VideoFile))
            throw new FileNotFoundException($"文件({VideoFile})不存在");

        InFmtCtx ??= new FAVFormatContext();
        InFmtCtx.OpenFmtCtx(VideoFile);

        OpenAL = new SilkOpenALUtils();
    }

    /// <summary>
    /// Starts the demux/decode task plus audio and video playback tasks.
    /// No-op if <see cref="Open"/> has not been called or OpenAL failed to initialize.
    /// </summary>
    public void Play()
    {
        // BUGFIX: guard against Play() before Open() (OpenAL would be null).
        if (OpenAL is null || !OpenAL.Initialization) { return; }
        PlayStatus = SourceState.Playing;
        // LongRunning: these are dedicated loops, keep them off the thread pool's worker threads.
        Task.Factory.StartNew(LoadMediaDataTask, TaskCreationOptions.LongRunning);
        if (IsExistAudioStream)
            Task.Factory.StartNew(PlayAudioTask, TaskCreationOptions.LongRunning);
        if (IsExistVideoStream)
            Task.Factory.StartNew(PlayVideoTask, TaskCreationOptions.LongRunning);
        OpenAL.SourcePlay();
    }

    /// <summary>Demux/decode loop: keeps the audio/video FIFOs topped up while playing.</summary>
    private void LoadMediaDataTask()
    {
        while (Paly)
        {
            if (PlayStatus == SourceState.Stopped)
                return;
            if (CheckNeedLoad())
                LoadFrame();
            else
                ffmpeg.av_usleep(10000u); // queues full: back off 10 ms
        }
    }

    /// <summary>
    /// Audio loop: feeds PCM frames to OpenAL, keeps the source playing/paused/stopped in
    /// sync with <see cref="PlayStatus"/>, and advances <see cref="CurPts"/> (the master clock).
    /// </summary>
    private void PlayAudioTask()
    {
        while (true)
        {
            int queuedNum = OpenAL.BuffersQueued();
            // Read the current source playback state
            SourceState curStat = OpenAL.GetSourceState();

            #region Playback state control
            if (PlayStatus == SourceState.Playing && queuedNum > 0 && curStat != SourceState.Playing)
                OpenAL.SourcePlay();
            else if (PlayStatus == SourceState.Paused && curStat == SourceState.Playing)
                OpenAL.SourcePause();
            else if (PlayStatus == SourceState.Stopped)
            {
                OpenAL.SourceStop();
                Dispose();
                break;
            }
            #endregion
            // Keep at most 3 buffers queued so pause/seek latency stays low.
            if (queuedNum < 3)
            {
                if (AudioFifo.TryDequeue(out PcmFrame pcm))
                {
                    OpenAL.QueueBuffer(pcm.Pcm, pcm.Size, target_fmt, pcm.SampleRate);
                    CurPts = pcm.NewPts; // audio drives the playback clock
                    pcm.Dispose();
                }
            }

            OpenAL.ClearUnqueueBuffers();
            ffmpeg.av_usleep(1000u);
        }
    }

    /// <summary>
    /// Video loop: dequeues YUV frames and hands them to <see cref="VideoPlayAction"/>.
    /// With audio present the audio clock paces us; otherwise we pace against wall time.
    /// </summary>
    private void PlayVideoTask()
    {
        DateTime start = DateTime.Now;
        while (PlayStatus != SourceState.Stopped)
        {
            // 'using' disposes the frame at the end of each iteration.
            // BUGFIX: removed the extra finally-Dispose that double-disposed the frame.
            using FAVFrame yuvFrame = VideoFifo.Dequeue(ref this.CurPts);
            long curNewPts = 0;
            if (yuvFrame != null)
            {
                curNewPts = yuvFrame.NewPts;
                try { VideoPlayAction?.Invoke(yuvFrame); }
                catch { /* best-effort rendering: a bad callback must not kill the loop */ }
            }

            if (IsExistAudioStream)
            {
                ffmpeg.av_usleep(20000u); // audio is the master clock; just pace delivery
            }
            else
            {
                CurPts = curNewPts;
                double elapsedMs = (DateTime.Now - start).TotalMilliseconds;
                double aheadMs = CurPts - elapsedMs;
                if (aheadMs > 0)
                    ffmpeg.av_usleep((uint)(aheadMs * 1000)); // ms → µs
            }
        }
    }

    /// <summary>Whether more frames should be demuxed/decoded right now.</summary>
    private bool CheckNeedLoad()
    {
        if (PlayStatus == SourceState.Paused) { return false; }
        if (PlayEnd) { return true; }
        return (IsExistAudioStream && AudioFifo.Count < 40) || (IsExistVideoStream && VideoFifo.Size() < 25);
    }

    /// <summary>
    /// Reads one packet, decodes it, and routes the frame to the proper queue.
    /// BUGFIX: the previous goto-based version disposed the packet twice (explicit Dispose
    /// plus the 'using' declaration) and leaked the frame for non-audio/video streams.
    /// </summary>
    private void LoadFrame()
    {
        using FAVPacket packet = new FAVPacket(); // 'using' owns disposal on every path
        if (InFmtCtx.ReadPacket(packet) < 0)
            return; // end of file or read error

        FAVFrame frame = new FAVFrame();
        int ret = InFmtCtx.Decode(packet, frame);
        if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
        {
            frame.Dispose();
            return;
        }

        if (packet.StreamIndex == InFmtCtx.VideoIndex)
            HandleVideoFrame(frame);
        else if (packet.StreamIndex == InFmtCtx.AudioIndex)
            HandleAudioFrame(frame);
        else
            frame.Dispose(); // e.g. subtitle/data stream: previously leaked
    }

    /// <summary>Converts a decoded video frame to YUV420P if needed and enqueues it (queue takes ownership).</summary>
    private void HandleVideoFrame(FAVFrame frame)
    {
        SwsCtx ??= new FSwsContext(frame, AVPixelFormat.AV_PIX_FMT_YUV420P);
        if (SwsCtx.Ctx == null)
        {
            frame.Dispose();
            throw new ArgumentNullException("SwsCtx为null");
        }

        if (!SetPts)
        {
            SetPts = true;
            InFmtCtx.VideoFirstPts = frame.Pts;
        }
        frame.InitNewPts(InFmtCtx.VideoStream.TimeBase);

        // AV_PIX_FMT_YUV420P == 0, so format < 1 covers frames already in the target format.
        if (frame.Ptr->format < 1)
        {
            VideoFifo.Enqueue(frame); // queue takes ownership; consumer disposes
        }
        else if (AVPixelFormat.AV_PIX_FMT_YUV420P != frame.VideoFormat)
        {
            FAVFrame yuvFrame = SwsCtx.ConvertVideoFrameToYUV(frame);
            if (yuvFrame != null)
            {
                yuvFrame.InitNewPts(InFmtCtx.VideoStream.TimeBase);
                VideoFifo.Enqueue(yuvFrame);
            }
            frame.Dispose();
        }
        else
        {
            frame.Dispose(); // defensive: never leak the source frame
        }
    }

    /// <summary>Resamples a decoded audio frame to PCM and enqueues it; always disposes the source frame.</summary>
    private void HandleAudioFrame(FAVFrame frame)
    {
        try
        {
            if (target_fmt == 0)
            {
                // Lazily derive the output format from the first audio frame seen.
                SampleRate = frame.SampleRate;
                ChannelNum = frame.Channels;
                (AVSampleFormat avFmt, BufferFormat alFmt) = SilkOpenALUtils.AVSampleFmtToALFmt((AVSampleFormat)(frame.Format), frame.Channels);
                target_fmt = alFmt;
                out_sample_fmt = avFmt;
                PerSampleSize = ffmpeg.av_get_bytes_per_sample(out_sample_fmt);

                SwrCtx ??= new FSwrContext(frame, out_sample_fmt);
                if (SwrCtx.Inited == false)
                    return; // BUGFIX: frame is now disposed via finally instead of leaking
            }

            if (AudioFifo == null)
                throw new ArgumentException("AudioDecodeUtil->_fifo初始化异常");

            PcmFrame pcm = SwrCtx.ConvertAudioPcmData(frame, out_sample_fmt);
            pcm.InitPts(frame.Pts, InFmtCtx.AudioStream.TimeBase);

            if (pcm.NbSamples > 0)
                AudioFifo.Enqueue(pcm); // queue takes ownership
            else
                pcm.Dispose();
        }
        finally
        {
            frame.Dispose();
        }
    }

    #region Dispose
    private bool disposedValue;

    private void Dispose(bool disposing)
    {
        if (!disposedValue)
        {
            if (disposing)
            {
                InFmtCtx?.Dispose();
                SwsCtx?.Dispose();
                SwrCtx?.Dispose(); // BUGFIX: resampler was never disposed
                // NOTE(review): OpenAL source/buffers may also need explicit release here —
                // confirm whether SilkOpenALUtils exposes a cleanup method.
                VideoFifo?.Dispose();
                while (AudioFifo?.Count > 0)
                    if (AudioFifo.TryDequeue(out PcmFrame pcm))
                        pcm.Dispose();
            }
            disposedValue = true;
        }
    }

    public void Dispose()
    {
        // Do not change this code. Put cleanup code in the "Dispose(bool disposing)" method.
        Dispose(disposing: true);
        GC.SuppressFinalize(this);
    }
    #endregion
}