﻿using FFmpeg.AutoGen;
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;

namespace FFmpegLib
{
    /// <summary>
    /// Opens a media file with FFmpeg, locates its first video and audio streams,
    /// and owns the native decoding resources (format context, codec contexts,
    /// scaler, resampler, audio FIFO). All native resources are released via
    /// <see cref="Dispose()"/> or, as a safety net, the finalizer.
    /// </summary>
    public sealed unsafe class MediaDecodeUtils : IDisposable
    {
        private bool disposedValue;

        /// <summary>Path (or URL) of the currently opened media file.</summary>
        public string VideoFile { get; private set; }

        /// <summary>
        /// Human-readable FFmpeg error text from the last failed <see cref="Open"/> call.
        /// </summary>
        public string LastError { get; private set; }

        // Handles registered for native interop; freed on dispose.
        // NOTE(review): nothing in this file populates the list — presumably callers
        // pin buffers handed to FFmpeg; confirm usage at the call sites.
        private List<GCHandle> gCHandles = new List<GCHandle>(10);

        /// <summary>Demuxer context; opened via avformat_open_input.</summary>
        public AVFormatContext* in_fmt_ctx;
        /// <summary>
        /// Video decoder context.
        /// </summary>
        public AVCodecContext* video_codec_ctx;
        /// <summary>
        /// Audio decoder context.
        /// </summary>
        public AVCodecContext* audio_codec_ctx;
        /// <summary>
        /// Global FFmpeg time base (AV_TIME_BASE units), from av_get_time_base_q.
        /// </summary>
        public AVRational time_base_q;
        /// <summary>Total duration of the opened file, in seconds.</summary>
        public double total_sec;
        /// <summary>Stream index of the video stream, or -1 if none was found.</summary>
        public int video_index = -1;
        /// <summary>Stream index of the audio stream, or -1 if none was found.</summary>
        public int audio_index = -1;
        public AVCodecID video_cid;
        public AVCodecID audio_cid;
        /// <summary>
        /// Image scaling / pixel-format conversion context.
        /// </summary>
        public SwsContext* sws_ctx;
        /// <summary>
        /// Audio resampling context.
        /// </summary>
        public SwrContext* swr_ctx;
        /// <summary>
        /// Audio sample FIFO queue.
        /// </summary>
        public AVAudioFifo* _fifo;

        public MediaDecodeUtils() { }

        /// <summary>
        /// Opens <paramref name="file"/>, probes its streams, and creates decoder
        /// contexts for the first video and audio streams found.
        /// </summary>
        /// <param name="file">Path or URL of the media to open.</param>
        /// <returns>
        /// <c>true</c> on success; <c>false</c> on failure (see <see cref="LastError"/>).
        /// </returns>
        public bool Open(string file)
        {
            VideoFile = file;
            time_base_q = ffmpeg.av_get_time_base_q();

            AVFormatContext* in_ctx = ffmpeg.avformat_alloc_context();
            int ret = ffmpeg.avformat_open_input(&in_ctx, VideoFile, null, null);
            if (ret != 0) { goto error; }
            ffmpeg.av_format_inject_global_side_data(in_ctx);
            ret = ffmpeg.avformat_find_stream_info(in_ctx, null);
            if (ret != 0) { goto error; }

            for (int i = 0; i < in_ctx->nb_streams; i++)
            {
                AVCodecParameters* codecPar = in_ctx->streams[i]->codecpar;
                if (codecPar->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    video_index = i;
                    video_cid = in_ctx->video_codec_id = codecPar->codec_id;
                    video_codec_ctx = FFmpegUtils.GetCodecCtx(video_cid, true, codecPar);
                }
                else if (codecPar->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    audio_index = i;
                    audio_cid = in_ctx->audio_codec_id = codecPar->codec_id;
                    audio_codec_ctx = FFmpegUtils.GetCodecCtx(audio_cid, true, codecPar);
                }
            }
            in_fmt_ctx = in_ctx;
            // duration is expressed in AV_TIME_BASE units; convert to seconds.
            total_sec = ffmpeg.av_q2d(time_base_q) * in_ctx->duration;
            return true;

        error:
            LastError = GetErrorMessage(ret);
            // avformat_open_input frees the context itself on failure (nulling the
            // pointer). If opening succeeded and a later step failed, the context
            // must be released with avformat_close_input — a bare
            // avformat_free_context would leak the demuxer's internal state.
            if (in_ctx != null)
                ffmpeg.avformat_close_input(&in_ctx);
            return false;
        }

        /// <summary>
        /// Reads the next packet from the input into <paramref name="packet"/>.
        /// </summary>
        /// <returns>0 on success; a negative AVERROR code on failure or EOF.</returns>
        public int GetAVPacket(AVPacket* packet)
        {
            return ffmpeg.av_read_frame(in_fmt_ctx, packet);
        }

        /// <summary>
        /// Decodes <paramref name="in_packet"/> into <paramref name="out_frame"/>
        /// using the codec context matching the packet's stream index. Packets
        /// belonging to other streams are silently ignored (matches prior behavior).
        /// </summary>
        public void GetData(AVPacket* in_packet, AVFrame* out_frame)
        {
            if (in_packet->stream_index == video_index)
            {
                FFmpegUtils.DecodeAVPacket(video_codec_ctx, in_packet, out_frame);
            }
            else if (in_packet->stream_index == audio_index)
            {
                FFmpegUtils.DecodeAVPacket(audio_codec_ctx, in_packet, out_frame);
            }
        }

        /// <summary>
        /// Allocates (or reallocates) the audio sample FIFO.
        /// </summary>
        /// <param name="sample_num">Initial FIFO capacity, in samples per channel.</param>
        /// <param name="fmt">Sample format stored in the FIFO.</param>
        /// <param name="channels">Number of audio channels.</param>
        /// <returns><c>true</c> if the FIFO was allocated successfully.</returns>
        public bool AllocFifo(int sample_num = 40000, AVSampleFormat fmt = AVSampleFormat.AV_SAMPLE_FMT_S16, int channels = 2)
        {
            // Release any previously allocated FIFO so repeated calls do not leak.
            if (_fifo != null)
            {
                ffmpeg.av_audio_fifo_free(_fifo);
                _fifo = null;
            }
            _fifo = ffmpeg.av_audio_fifo_alloc(fmt, channels, sample_num);
            return _fifo != null;
        }

        /// <summary>
        /// Converts an FFmpeg error code into its human-readable message.
        /// </summary>
        private static string GetErrorMessage(int error)
        {
            int bufferSize = 1024;
            byte* buffer = stackalloc byte[bufferSize];
            ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
            return Marshal.PtrToStringAnsi((IntPtr)buffer);
        }

        /// <summary>
        /// Releases every native FFmpeg resource this instance owns and unpins
        /// all registered GC handles. Safe to call more than once.
        /// </summary>
        private void FreeAll()
        {
            if (audio_codec_ctx != null)
                fixed (AVCodecContext** toFreeCtx = &audio_codec_ctx)
                    ffmpeg.avcodec_free_context(toFreeCtx);

            if (video_codec_ctx != null)
                fixed (AVCodecContext** toFreeCtx = &video_codec_ctx)
                    ffmpeg.avcodec_free_context(toFreeCtx);

            if (sws_ctx != null)
            {
                ffmpeg.sws_freeContext(sws_ctx);
                sws_ctx = null;
            }

            if (swr_ctx != null)
                fixed (SwrContext** toFree = &swr_ctx)
                    ffmpeg.swr_free(toFree);

            if (_fifo != null)
            {
                ffmpeg.av_audio_fifo_free(_fifo);
                _fifo = null;
            }

            // The context was opened with avformat_open_input, so it must be
            // released with avformat_close_input (which also frees it).
            if (in_fmt_ctx != null)
                fixed (AVFormatContext** toFree = &in_fmt_ctx)
                    ffmpeg.avformat_close_input(toFree);

            foreach (GCHandle handle in gCHandles)
                if (handle.IsAllocated)
                    handle.Free();
            gCHandles.Clear();
        }

        private void Dispose(bool disposing)
        {
            if (!disposedValue)
            {
                // The FFmpeg contexts are unmanaged memory, so they must be freed
                // on BOTH paths — including the finalizer path — not only when
                // disposing == true.
                FreeAll();
                disposedValue = true;
            }
        }

        /// <summary>
        /// Safety net: frees native resources if the caller forgot to Dispose.
        /// </summary>
        ~MediaDecodeUtils()
        {
            Dispose(disposing: false);
        }

        public void Dispose()
        {
            Dispose(disposing: true);
            GC.SuppressFinalize(this);
        }
    }
}