﻿using FFmpeg.AutoGen;
using Silk.NET.OpenAL;
using System;

namespace MediaLib
{
    /// <summary>
    /// Thin wrapper that owns one OpenAL playback device, context and source, and
    /// streams PCM audio to it via queued buffers. Also provides static helpers that
    /// map FFmpeg sample formats to SDL/OpenAL buffer formats.
    /// </summary>
    public sealed unsafe class SilkOpenALUtilities : IDisposable
    {
        // Dedicated lock object: locking on `this` risks deadlock with external
        // code that also locks the instance.
        private readonly object _gate = new object();

        private AL _al;
        private ALContext _alc;
        private Device* _device;
        private Context* _ctx;
        private uint _sid; // the single OpenAL source this wrapper drives

        /// <summary>True once the device, context and source have been created.</summary>
        public bool Initialization { get; private set; }

        public SilkOpenALUtilities()
        {
            Init();
        }

        /// <summary>
        /// Opens the default playback device, creates a context and one source, and
        /// applies default listener/source settings. Idempotent and thread-safe.
        /// </summary>
        /// <returns>true when initialization succeeded (or had already succeeded).</returns>
        public bool Init()
        {
            if (Initialization) { return true; }
            lock (_gate)
            {
                if (Initialization) { return true; }
                if (!OpenDevice(out _al, out _alc, out _device, out _ctx)) { return false; }
                _sid = _al.GenSource();

                _al.SetSourceProperty(_sid, SourceFloat.Pitch, 1.0f);
                _al.SetSourceProperty(_sid, SourceFloat.Gain, 1.0f);
                _al.SetSourceProperty(_sid, SourceFloat.ReferenceDistance, 50f);
                _al.SetSourceProperty(_sid, SourceVector3.Position, 0, 0, 0);
                _al.SetSourceProperty(_sid, SourceVector3.Velocity, 0, 0, 0);
                _al.SetSourceProperty(_sid, SourceBoolean.Looping, false);
                // NOTE: the original code also wrote SourceInteger.SourceType = Static.
                // AL_SOURCE_TYPE is a read-only attribute (OpenAL 1.1 spec, source
                // attributes) and writing it raises AL_INVALID_OPERATION; the type
                // becomes Streaming automatically on the first SourceQueueBuffers
                // call, so that write was removed.
                _al.SpeedOfSound(343.3f);
                _al.DopplerFactor(1f);
                _al.DistanceModel(DistanceModel.LinearDistanceClamped);
                _al.SetListenerProperty(ListenerVector3.Position, 0, 0, 0);
                return Initialization = true;
            }
        }

        /// <summary>Pauses playback on the source.</summary>
        public void SourcePause() => _al.SourcePause(_sid);

        /// <summary>Starts (or resumes) playback on the source.</summary>
        public void SourcePlay() => _al.SourcePlay(_sid);

        /// <summary>Stops playback on the source.</summary>
        public void SourceStop() => _al.SourceStop(_sid);

        /// <summary>Rewinds the source to its initial state.</summary>
        public void SourceRewind() => _al.SourceRewind(_sid);

        /// <summary>Sets the source gain (volume); 1.0 is unattenuated.</summary>
        public void SetVolume(float vol) => _al.SetSourceProperty(_sid, SourceFloat.Gain, vol);

        /// <summary>
        /// Sets the context's speed of sound (meters/second).
        /// NOTE(review): despite the name, this does NOT change playback speed/pitch;
        /// it only affects Doppler computation. Confirm against callers before renaming.
        /// </summary>
        public void SetSpeed(float vol) => _al.SpeedOfSound(vol);

        /// <summary>
        /// Copies <paramref name="size"/> bytes of PCM data into a new OpenAL buffer
        /// and appends it to the source's queue.
        /// </summary>
        /// <param name="buffer">Pointer to the PCM sample data.</param>
        /// <param name="size">Size of the data in bytes.</param>
        /// <param name="fmt">OpenAL buffer format describing channels/bit depth.</param>
        /// <param name="sampleRate">Sample rate in Hz.</param>
        public void QueueBuffer(void* buffer, int size, BufferFormat fmt, int sampleRate)
        {
            uint bufferId = _al.GenBuffer();
            _al.BufferData(bufferId, fmt, buffer, size, sampleRate);
            _al.SourceQueueBuffers(_sid, 1, &bufferId);
        }

        /// <summary>Number of queued buffers the source has finished playing.</summary>
        public int BuffersProcessed()
        {
            _al.GetSourceProperty(_sid, GetSourceInteger.BuffersProcessed, out int processedCount);
            return processedCount;
        }

        /// <summary>Number of buffers currently queued on the source.</summary>
        public int BuffersQueued()
        {
            _al.GetSourceProperty(_sid, GetSourceInteger.BuffersQueued, out int queuedCount);
            return queuedCount;
        }

        /// <summary>Current playback state (Playing, Paused, Stopped, ...).</summary>
        public SourceState GetSourceState()
        {
            _al.GetSourceProperty(_sid, GetSourceInteger.SourceState, out int status);
            return (SourceState)status;
        }

        /// <summary>
        /// Checks for buffers the source has finished playing, unqueues them and
        /// deletes them to reclaim their memory.
        /// </summary>
        public void ClearUnqueueBuffers()
        {
            _al.GetSourceProperty(_sid, GetSourceInteger.BuffersProcessed, out int processedCount);
            if (processedCount > 0)
            {
                uint[] bufferIds = new uint[processedCount];
                _al.SourceUnqueueBuffers(_sid, bufferIds);
                _al.DeleteBuffers(bufferIds);
            }
        }

        /// <summary>
        /// Tears down all OpenAL state in the reverse order of creation:
        /// stop playback, reclaim buffers, delete the source, un-current and destroy
        /// the context, close the device, then dispose the API bindings.
        /// (The original closed the device before destroying the context and leaked
        /// the generated source.)
        /// </summary>
        private void CloseAudioDevice()
        {
            if (!Initialization) { return; } // nothing was created; avoid null API calls
            _al.SourceStop(_sid);
            ClearUnqueueBuffers();
            _al.DeleteSource(_sid);
            _alc.MakeContextCurrent(null); // a current context cannot be destroyed safely
            _alc.DestroyContext(_ctx);
            _ctx = null;
            _alc.CloseDevice(_device);
            _device = null;
            _al.Dispose();
            _alc.Dispose();
            Initialization = false;
        }

        #region dispose
        private bool disposedValue;

        // `private` (not `protected`): a protected member in a sealed class is
        // unreachable and triggers compiler warning CS0628.
        private void Dispose(bool disposing)
        {
            if (!disposedValue)
            {
                if (disposing)
                {
                    CloseAudioDevice();
                }
                disposedValue = true;
            }
        }

        public void Dispose()
        {
            // Do not change this code. Put cleanup code in the "Dispose(bool disposing)" method.
            Dispose(disposing: true);
            GC.SuppressFinalize(this);
        }
        #endregion

        /// <summary>
        /// Opens the default audio playback device and makes a new context current.
        /// </summary>
        /// <param name="al">Receives the AL API binding.</param>
        /// <param name="alc">Receives the ALC API binding.</param>
        /// <param name="device">Receives the opened device (null on failure).</param>
        /// <param name="ctx">Receives the created context (null on failure).</param>
        /// <returns>true when the device opened and the context became current.</returns>
        public static bool OpenDevice(out AL al, out ALContext alc, out Device* device, out Context* ctx)
        {
            al = AL.GetApi();
            alc = ALContext.GetApi();
            device = alc.OpenDevice(null); // null selects the default playback device
            if (device == null)
            {
                ctx = null;
                return false;
            }
            ctx = alc.CreateContext(device, null);
            return ctx != null && alc.MakeContextCurrent(ctx);
        }

        /// <summary>
        /// Maps a channel count / bit depth pair to an OpenAL buffer format.
        /// Mono/stereo use core formats; 4/6-channel layouts require the
        /// AL_EXT_MCFORMATS extension. Returns 0 when no mapping exists.
        /// </summary>
        /// <param name="al">AL binding, used only to query the MCFORMATS extension.</param>
        /// <param name="channel">Channel count (1, 2, 4 or 6).</param>
        /// <param name="depthBit">Sample depth in bits (8 or 16).</param>
        public static BufferFormat GetALFormat(AL al, int channel, int depthBit)
        {
            BufferFormat fmt = 0;
            if (channel == 1)
            {
                if (depthBit == 8)
                    fmt = BufferFormat.Mono8;
                else if (depthBit == 16)
                    fmt = BufferFormat.Mono16;
            }
            else if (channel == 2)
            {
                if (depthBit == 8)
                    fmt = BufferFormat.Stereo8;
                else if (depthBit == 16)
                    fmt = BufferFormat.Stereo16;
            }
            else if (al.IsExtensionPresent("AL_EXT_MCFORMATS"))
            {
                if (depthBit == 8)
                {
                    if (channel == 4)
                        fmt = (BufferFormat)al.GetEnumValue("AL_FORMAT_QUAD8");
                    else if (channel == 6)
                        fmt = (BufferFormat)al.GetEnumValue("AL_FORMAT_51CHN8");
                }
                else if (depthBit == 16)
                {
                    if (channel == 4)
                        fmt = (BufferFormat)al.GetEnumValue("AL_FORMAT_QUAD16");
                    else if (channel == 6)
                        fmt = (BufferFormat)al.GetEnumValue("AL_FORMAT_51CHN16");
                }
            }
            return fmt;
        }

        /// <summary>
        /// Maps an FFmpeg sample format to the nearest SDL audio format, together
        /// with the FFmpeg format the caller should resample to. Planar formats map
        /// to their interleaved counterpart; wide/float formats collapse to S32.
        /// Unknown inputs yield (AV_SAMPLE_FMT_NONE, 0).
        /// </summary>
        public static ValueTuple<AVSampleFormat, ushort> AVSampleFmtToSdlFmt(AVSampleFormat in_fmt)
        {
            ushort sdlFmt = 0;
            AVSampleFormat av_fmt = AVSampleFormat.AV_SAMPLE_FMT_NONE;
            switch (in_fmt)
            {
                case AVSampleFormat.AV_SAMPLE_FMT_U8:
                case AVSampleFormat.AV_SAMPLE_FMT_U8P:
                    av_fmt = AVSampleFormat.AV_SAMPLE_FMT_U8;
                    sdlFmt = Silk.NET.SDL.Sdl.AudioU8;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S16:
                case AVSampleFormat.AV_SAMPLE_FMT_S16P:
                    av_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
                    sdlFmt = Silk.NET.SDL.Sdl.AudioS16;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S64:
                case AVSampleFormat.AV_SAMPLE_FMT_S64P:
                case AVSampleFormat.AV_SAMPLE_FMT_DBL:
                case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
                case AVSampleFormat.AV_SAMPLE_FMT_S32:
                case AVSampleFormat.AV_SAMPLE_FMT_S32P:
                case AVSampleFormat.AV_SAMPLE_FMT_FLT:
                case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
                    av_fmt = AVSampleFormat.AV_SAMPLE_FMT_S32;
                    sdlFmt = Silk.NET.SDL.Sdl.AudioS32;
                    break;
                default:
                    break;
            }
            return (av_fmt, sdlFmt);
        }

        /// <summary>
        /// Maps an FFmpeg sample format and channel count to the core OpenAL buffer
        /// format to play it with, together with the FFmpeg format the caller should
        /// resample to. Anything wider than 16-bit (S32/S64/FLT/DBL and planar
        /// variants) collapses to S16 because core OpenAL buffers only carry 8/16-bit
        /// PCM. Channel counts other than 1 or 2 yield BufferFormat 0.
        /// </summary>
        public static ValueTuple<AVSampleFormat, BufferFormat> AVSampleFmtToALFmt(AVSampleFormat in_fmt, int channels)
        {
            BufferFormat out_ALFmt = 0;
            AVSampleFormat av_fmt = AVSampleFormat.AV_SAMPLE_FMT_NONE;
            switch (in_fmt)
            {
                case AVSampleFormat.AV_SAMPLE_FMT_U8:
                case AVSampleFormat.AV_SAMPLE_FMT_U8P:
                    av_fmt = AVSampleFormat.AV_SAMPLE_FMT_U8;
                    if (channels == 1)
                        out_ALFmt = BufferFormat.Mono8;
                    else if (channels == 2)
                        out_ALFmt = BufferFormat.Stereo8;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S16:
                case AVSampleFormat.AV_SAMPLE_FMT_S16P:
                // The wide/float cases below shared byte-identical bodies with S16 in
                // the original three separate branches; they are consolidated here.
                case AVSampleFormat.AV_SAMPLE_FMT_S32:
                case AVSampleFormat.AV_SAMPLE_FMT_S32P:
                case AVSampleFormat.AV_SAMPLE_FMT_FLT:
                case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
                case AVSampleFormat.AV_SAMPLE_FMT_S64:
                case AVSampleFormat.AV_SAMPLE_FMT_S64P:
                case AVSampleFormat.AV_SAMPLE_FMT_DBL:
                case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
                    av_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
                    if (channels == 1)
                        out_ALFmt = BufferFormat.Mono16;
                    else if (channels == 2)
                        out_ALFmt = BufferFormat.Stereo16;
                    break;
                default:
                    break;
            }
            return (av_fmt, out_ALFmt);
        }

    }
}