﻿using FFmpeg.AutoGen;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

namespace FFmpegLib
{
    public sealed unsafe class FFmpegUtils
    {
        private static readonly object FFmpegLogLock = new object();
        // Whether the FFmpeg libraries have been initialized.
        // volatile: read outside the lock in the double-checked pattern in FFmpegLibInit.
        private static volatile bool FFmpegInitialization = false;
        // Keeps the native log-callback delegate alive so the GC cannot collect it
        // while unmanaged code may still hold a function pointer to it.
        private static av_log_set_callback_callback FFmpegLogCallback;
        // Optional managed sink for FFmpeg log lines.
        public static Action<string> DebugLog;

        #region Public static API wrappers

        /// <summary>
        /// Finds and opens a codec, returning a ready-to-use codec context.
        /// </summary>
        /// <param name="codec_id">Codec id to look up.</param>
        /// <param name="isDecoder">true: find a decoder, false: find an encoder.</param>
        /// <param name="param">Stream parameters copied into the context; must not be null.</param>
        /// <returns>An opened codec context, or null on any failure (the partially built context is freed).</returns>
        public unsafe static AVCodecContext* GetCodecCtx(AVCodecID codec_id, bool isDecoder, AVCodecParameters* param)
        {
            AVCodec* codec = isDecoder ? ffmpeg.avcodec_find_decoder(codec_id) : ffmpeg.avcodec_find_encoder(codec_id);
            if (codec == null)
                return null; // codec not available in this FFmpeg build

            AVCodecContext* codec_ctx = ffmpeg.avcodec_alloc_context3(codec);
            if (codec_ctx == null || param == null)
                goto Error;

            if (ffmpeg.avcodec_parameters_to_context(codec_ctx, param) < 0)
                goto Error;

            if (ffmpeg.avcodec_open2(codec_ctx, codec, null) < 0)
                goto Error;

            return codec_ctx;

        Error:
            // avcodec_free_context tolerates a null pointer target.
            ffmpeg.avcodec_free_context(&codec_ctx);
            return null;
        }

        /// <summary>
        /// Closes and frees a codec context.
        /// NOTE(review): the pointer is passed by value, so only the local copy is
        /// nulled by avcodec_free_context — the caller's pointer dangles afterwards
        /// and must not be reused.
        /// </summary>
        /// <param name="codec_ctx">Context to close and free.</param>
        /// <returns>The result of avcodec_close.</returns>
        public unsafe static int FreeDecoder(AVCodecContext* codec_ctx)
        {
            int ret = ffmpeg.avcodec_close(codec_ctx);
            ffmpeg.avcodec_free_context(&codec_ctx);
            return ret;
        }

        /// <summary>
        /// Creates a software-scaler context converting <paramref name="in_frame"/>'s
        /// format/size to the given destination format and (optional) size.
        /// </summary>
        /// <param name="in_frame">Source frame providing input width/height/format.</param>
        /// <param name="dstFmt">Destination pixel format.</param>
        /// <param name="dstW">Destination width; 0 means "same as source".</param>
        /// <param name="dstH">Destination height; 0 means "same as source".</param>
        public static SwsContext* GetSwsContext(AVFrame* in_frame, AVPixelFormat dstFmt, int dstW = 0, int dstH = 0)
        {
            if (dstW == 0) { dstW = in_frame->width; }
            if (dstH == 0) { dstH = in_frame->height; }
            // SWS_BILINEAR (== 2) replaces the previous magic number.
            return ffmpeg.sws_getContext(in_frame->width, in_frame->height, (AVPixelFormat)in_frame->format,
                                         dstW, dstH, dstFmt, ffmpeg.SWS_BILINEAR, null, null, null);
        }

        /// <summary>
        /// Creates and initializes a resampler that converts the frame's sample format
        /// to <paramref name="out_fmt"/>, keeping the sample rate and channel layout.
        /// </summary>
        /// <param name="in_frame">Frame supplying the input format, rate and layout.</param>
        /// <param name="out_fmt">Target sample format (default: interleaved S16).</param>
        /// <returns>An initialized SwrContext, or null on failure.</returns>
        public static SwrContext* GetSwrContext(AVFrame* in_frame, AVSampleFormat out_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16)
        {
            SwrContext* swr_ctx = ffmpeg.swr_alloc();
            AVSampleFormat in_fmt = (AVSampleFormat)(in_frame->format);
            int in_sample_rate = in_frame->sample_rate;
            long in_channel_layout = (long)(in_frame->channel_layout);

            // Rate and layout are passed through unchanged; only the format converts.
            int out_sample_rate = in_sample_rate;
            long out_channel_layout = in_channel_layout;

            swr_ctx = ffmpeg.swr_alloc_set_opts(swr_ctx,
                                              out_channel_layout,
                                              out_fmt,
                                              out_sample_rate,
                                              in_channel_layout,
                                              in_fmt,
                                              in_sample_rate,
                                              0, null);
            // swr_alloc_set_opts returns null on invalid parameters.
            if (swr_ctx == null)
                return null;

            int ret = ffmpeg.swr_init(swr_ctx);
            if (ret < 0)
            {
                ffmpeg.swr_free(&swr_ctx);
                swr_ctx = null;
            }
            return swr_ctx;
        }

        /// <summary>
        /// Sends one packet to the decoder and receives one frame.
        /// </summary>
        /// <param name="dec_ctx">Decoder context.</param>
        /// <param name="in_packet">Compressed input packet.</param>
        /// <param name="out_frame">Receives the decoded frame.</param>
        /// <returns>
        /// 0 on success; a negative AVERROR otherwise. AVERROR(EAGAIN) means the
        /// decoder needs more input before a frame is available.
        /// </returns>
        public static int DecodeAVPacket(AVCodecContext* dec_ctx, AVPacket* in_packet, AVFrame* out_frame)
        {
            int ret = ffmpeg.avcodec_send_packet(dec_ctx, in_packet);
            if (ret < 0)
            {
                return ret;
            }
            // One receive per send; a decoder can in general emit several frames per
            // packet, so callers needing full drain must loop on receive themselves.
            return ffmpeg.avcodec_receive_frame(dec_ctx, out_frame);
        }

        /// <summary>
        /// Sends one frame to the encoder and receives one packet.
        /// </summary>
        /// <param name="dec_ctx">Encoder context (parameter name kept for source compatibility).</param>
        /// <param name="in_frame">Raw input frame.</param>
        /// <param name="out_packet">Receives the encoded packet.</param>
        /// <returns>0 on success; a negative AVERROR otherwise (AVERROR(EAGAIN) = needs more input).</returns>
        public static int EncodeAVPacket(AVCodecContext* dec_ctx, AVFrame* in_frame, AVPacket* out_packet)
        {
            int ret = ffmpeg.avcodec_send_frame(dec_ctx, in_frame);
            if (ret < 0)
            {
                return ret;
            }
            return ffmpeg.avcodec_receive_packet(dec_ctx, out_packet);
        }

        /// <summary>
        /// Receives one packet from the encoder.
        /// NOTE(review): the avcodec_send_frame step was disabled at some point, so
        /// this only drains already-submitted frames; <paramref name="in_frame"/> is
        /// currently unused — confirm callers send frames elsewhere.
        /// </summary>
        /// <param name="enc_ctx">Encoder context.</param>
        /// <param name="in_frame">Input frame (currently not submitted).</param>
        /// <param name="out_pkt">Receives the encoded packet.</param>
        /// <returns>0 on success; a negative AVERROR otherwise.</returns>
        public static int Ecoding(AVCodecContext* enc_ctx, AVFrame* in_frame, AVPacket* out_pkt)
        {
            return ffmpeg.avcodec_receive_packet(enc_ctx, out_pkt);
        }

        /// <summary>
        /// Time-base conversion. Currently a NO-OP: the original implementation relied
        /// on AVStream.codec, which was removed after FFmpeg 4.4.1. Kept so existing
        /// call sites keep compiling until a codecpar-based replacement is written.
        /// </summary>
        /// <param name="stream">Stream whose time base would be used.</param>
        /// <param name="packet">Packet whose timestamps would be rescaled.</param>
        /// <param name="encoding">true: encoding direction, false: decoding direction.</param>
        public static void TimebaseConversion(AVStream* stream, AVPacket* packet, bool encoding = true)
        {
            // Intentionally empty — see summary.
        }

        /// <summary>
        /// Rescales a packet's pts/dts/duration from the input stream's time base to
        /// the output stream's, and invalidates the byte position.
        /// </summary>
        /// <param name="in_stream">Stream the packet was read from.</param>
        /// <param name="out_stream">Stream the packet will be written to.</param>
        /// <param name="pkt">Packet to rescale in place.</param>
        public static void AVPacketTimestampConversion(AVStream* in_stream, AVStream* out_stream, AVPacket* pkt)
        {
            // av_packet_rescale_ts replaces the three separate av_rescale_q(_rnd) calls.
            ffmpeg.av_packet_rescale_ts(pkt, in_stream->time_base, out_stream->time_base);
            // pos is meaningless after remuxing; -1 lets the muxer recompute it.
            pkt->pos = -1;
        }

        /// <summary>
        /// Reads the next packet from the input context.
        /// </summary>
        /// <param name="ifmt_ctx">Demuxer context.</param>
        /// <param name="out_pkt">Receives the packet.</param>
        /// <returns>0 on success; a negative AVERROR (e.g. AVERROR_EOF) otherwise.</returns>
        public static int ReadPacket(AVFormatContext* ifmt_ctx, AVPacket* out_pkt)
        {
            return ffmpeg.av_read_frame(ifmt_ctx, out_pkt);
        }

        /// <summary>
        /// Rescales a packet's timestamps from the matching input stream to the
        /// matching output stream, then writes it to the output (interleaved).
        /// Assumes input and output share the same stream indices.
        /// </summary>
        /// <param name="ifmt_ctx">Input format context.</param>
        /// <param name="ofmt_ctx">Output format context.</param>
        /// <param name="pkt">Packet to write; ownership follows av_interleaved_write_frame semantics.</param>
        /// <returns>true if the write succeeded.</returns>
        public static bool WritePacket(AVFormatContext* ifmt_ctx, AVFormatContext* ofmt_ctx, AVPacket* pkt)
        {
            AVStream* in_stream = ifmt_ctx->streams[pkt->stream_index];
            AVStream* out_stream = ofmt_ctx->streams[pkt->stream_index];

            // av_packet_rescale_ts replaces the three separate av_rescale_q(_rnd) calls.
            ffmpeg.av_packet_rescale_ts(pkt, in_stream->time_base, out_stream->time_base);
            pkt->pos = -1;

            int ret = ffmpeg.av_interleaved_write_frame(ofmt_ctx, pkt);
            return ret >= 0;
        }

        /// <summary>
        /// Writes a packet for the first output stream of the given media type.
        /// NOTE(review): since FFmpeg removed AVStream.codec, the encode step that used
        /// to turn <paramref name="frame"/> into the packet here is disabled — the
        /// packet written is empty. Callers must encode the frame themselves (e.g. via
        /// EncodeAVPacket) before this method is useful; confirm intended usage.
        /// </summary>
        /// <param name="ofmt_ctx">Output format context.</param>
        /// <param name="frame">Frame that would be encoded (currently unused).</param>
        /// <param name="type">Media type selecting the target stream.</param>
        /// <returns>true if a matching stream was found and the write succeeded.</returns>
        public static bool WriteFrame(AVFormatContext* ofmt_ctx, AVFrame* frame, AVMediaType type)
        {
            AVPacket* pkt = ffmpeg.av_packet_alloc();
            if (pkt == null)
                return false;

            // Locate the first output stream of the requested media type using
            // codecpar (the supported replacement for the removed AVStream.codec).
            AVStream* cur_stream = null;
            for (int i = 0; i < (int)ofmt_ctx->nb_streams; i++)
            {
                if (ofmt_ctx->streams[i]->codecpar->codec_type == type)
                {
                    cur_stream = ofmt_ctx->streams[i];
                    break;
                }
            }
            // Previously cur_stream was never assigned (lookup loop was commented
            // out), so cur_stream->index dereferenced null. Guard instead of crashing.
            if (cur_stream == null)
            {
                ffmpeg.av_packet_free(&pkt);
                return false;
            }

            TimebaseConversion(cur_stream, pkt, true); // currently a no-op, see TimebaseConversion
            pkt->stream_index = cur_stream->index;

            int ret = ffmpeg.av_interleaved_write_frame(ofmt_ctx, pkt);
            ffmpeg.av_packet_free(&pkt);
            return ret == 0;
        }

        /// <summary>
        /// Converts an audio frame's samples into <paramref name="buffer"/>.
        /// If the frame already has the target format and is interleaved, the data is
        /// copied directly; otherwise it is resampled through <paramref name="swr_ctx"/>.
        /// </summary>
        /// <param name="swr_ctx">Resampler (only used when conversion is required).</param>
        /// <param name="in_frame">Source audio frame.</param>
        /// <param name="buffer">Destination buffer.</param>
        /// <param name="buffer_size">Number of bytes copied on the fast path.
        /// NOTE(review): assumes in_frame->data[0] holds at least buffer_size bytes — confirm callers size this from GetBufferSize.</param>
        /// <param name="target_fmt">Desired output sample format.</param>
        /// <returns>The number of samples on the fast path, or swr_convert's result (samples per channel, or negative on error).</returns>
        public static int ConvertAudioFrameToPCM(SwrContext* swr_ctx, AVFrame* in_frame, byte* buffer, uint buffer_size, AVSampleFormat target_fmt)
        {
            AVSampleFormat cur_fmt = (AVSampleFormat)in_frame->format;
            int is_planar = ffmpeg.av_sample_fmt_is_planar(cur_fmt);
            if (target_fmt == cur_fmt && is_planar == 0)
            {
                // Already interleaved in the target format — plain memory copy.
                Unsafe.CopyBlock(buffer, in_frame->data[0], buffer_size);
                return in_frame->nb_samples;
            }
            return ffmpeg.swr_convert(swr_ctx, &buffer, in_frame->nb_samples, in_frame->extended_data, in_frame->nb_samples);
        }

        /// <summary>
        /// Scales/converts a video frame into <paramref name="out_frame"/>, whose
        /// buffers must already be allocated; its width/height are set from the input.
        /// </summary>
        /// <returns>The height of the output slice, as returned by sws_scale.</returns>
        public static int ConvertVideoFrameToYUV(SwsContext* sws_ctx, AVFrame* in_frame, AVFrame* out_frame)
        {
            out_frame->width = in_frame->width;
            out_frame->height = in_frame->height;
            return ffmpeg.sws_scale(sws_ctx, in_frame->data, in_frame->linesize, 0, in_frame->height, out_frame->data, out_frame->linesize);
        }

        /// <summary>
        /// Computes the buffer size required for the given sample configuration.
        /// </summary>
        /// <param name="nb_channels">Channel count.</param>
        /// <param name="nb_samples">Samples per channel.</param>
        /// <param name="fmt">Sample format.</param>
        /// <returns>Required size in bytes, or a negative AVERROR.</returns>
        public static int GetBufferSize(int nb_channels, int nb_samples, AVSampleFormat fmt = AVSampleFormat.AV_SAMPLE_FMT_S16)
        {
            // The size is the function's return value; linesize (written into
            // the out parameter) is not needed by callers.
            int linesize = -1;
            return ffmpeg.av_samples_get_buffer_size(&linesize, nb_channels, nb_samples, fmt, 0);
        }

        /// <summary>
        /// Allocates an AVBuffer sized for the given sample configuration.
        /// On failure, <paramref name="buffer"/> is null and <paramref name="size"/> is negative.
        /// </summary>
        public static void AllocBuffer(out AVBufferRef* buffer, out int size, int nb_channels, int nb_samples, AVSampleFormat fmt = AVSampleFormat.AV_SAMPLE_FMT_S16)
        {
            buffer = null;
            size = GetBufferSize(nb_channels, nb_samples, fmt);
            if (size < 0) { return; }
            buffer = ffmpeg.av_buffer_alloc((ulong)size);
        }

        /// <summary>
        /// Maps a (possibly planar) sample format to its interleaved (packed) counterpart.
        /// Unknown formats map to 0 (AV_SAMPLE_FMT_NONE is -1, so 0 here means U8).
        /// </summary>
        public static AVSampleFormat ConvertFormat(AVSampleFormat inFmt)
        {
            AVSampleFormat outFmt = 0;
            switch (inFmt)
            {
                case AVSampleFormat.AV_SAMPLE_FMT_U8P:
                case AVSampleFormat.AV_SAMPLE_FMT_U8:
                    outFmt = AVSampleFormat.AV_SAMPLE_FMT_U8;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S16:
                case AVSampleFormat.AV_SAMPLE_FMT_S16P:
                    outFmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S32:
                case AVSampleFormat.AV_SAMPLE_FMT_S32P:
                    outFmt = AVSampleFormat.AV_SAMPLE_FMT_S32;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S64:
                case AVSampleFormat.AV_SAMPLE_FMT_S64P:
                    outFmt = AVSampleFormat.AV_SAMPLE_FMT_S64;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_FLT:
                case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
                    outFmt = AVSampleFormat.AV_SAMPLE_FMT_FLT;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_DBL:
                case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
                    outFmt = AVSampleFormat.AV_SAMPLE_FMT_DBL;
                    break;
                default:
                    break;
            }
            return outFmt;
        }

        /// <summary>
        /// Returns the per-sample byte size and planarity of a sample format.
        /// </summary>
        /// <param name="fmt">Sample format to inspect.</param>
        /// <param name="per_sample_size">Bytes per sample; -1 for unknown formats.</param>
        /// <param name="isPlanar">true when the format stores channels in separate planes.</param>
        public static void GetInfoFromFormat(AVSampleFormat fmt, out int per_sample_size, out bool isPlanar)
        {
            per_sample_size = -1;
            isPlanar = false;
            // Bytes per single sample.
            switch (fmt)
            {
                case AVSampleFormat.AV_SAMPLE_FMT_U8:
                case AVSampleFormat.AV_SAMPLE_FMT_U8P:
                    per_sample_size = 1;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S16:
                case AVSampleFormat.AV_SAMPLE_FMT_S16P:
                    per_sample_size = 2;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S32:
                case AVSampleFormat.AV_SAMPLE_FMT_S32P:
                case AVSampleFormat.AV_SAMPLE_FMT_FLT:
                case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
                    per_sample_size = 4;
                    break;
                case AVSampleFormat.AV_SAMPLE_FMT_S64:
                case AVSampleFormat.AV_SAMPLE_FMT_S64P:
                case AVSampleFormat.AV_SAMPLE_FMT_DBL:
                case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
                    per_sample_size = 8;
                    break;
                default:
                    break;
            }
            // Planar (per-channel planes) vs interleaved.
            switch (fmt)
            {
                case AVSampleFormat.AV_SAMPLE_FMT_U8P:
                case AVSampleFormat.AV_SAMPLE_FMT_S16P:
                case AVSampleFormat.AV_SAMPLE_FMT_S32P:
                case AVSampleFormat.AV_SAMPLE_FMT_S64P:
                case AVSampleFormat.AV_SAMPLE_FMT_FLTP:
                case AVSampleFormat.AV_SAMPLE_FMT_DBLP:
                    isPlanar = true;
                    break;
                default:
                    break;
            }
        }

        /// <summary>
        /// Frees memory previously allocated by FFmpeg (av_malloc family).
        /// </summary>
        /// <param name="data">Pointer to release; must have come from FFmpeg's allocator.</param>
        public static void FreeData(void* data)
        {
            ffmpeg.av_free(data);
        }

        /// <summary>
        /// Seeks the input to the given time offset.
        /// </summary>
        /// <param name="inCtx">Open input context.</param>
        /// <param name="time">Target position from stream start.</param>
        /// <param name="streamIndex">Stream to seek on, or -1 for the default stream.</param>
        /// <returns>true on success.</returns>
        public static bool SeekStream(FAVFormatContext inCtx, TimeSpan time, int streamIndex = -1)
        {
            // With streamIndex == -1 the timestamp is in AV_TIME_BASE units.
            long timestamp = (long)(time.TotalSeconds * ffmpeg.AV_TIME_BASE);
            return SeekStream(inCtx, timestamp, streamIndex);
        }

        /// <summary>
        /// Seeks the input to the given timestamp (AV_TIME_BASE units; rescaled to the
        /// stream's time base when a specific stream is targeted) and flushes the
        /// affected decoder buffers so stale frames are not emitted.
        /// </summary>
        /// <returns>true on success.</returns>
        public static bool SeekStream(FAVFormatContext inCtx, long timestamp, int streamIndex = -1)
        {
            if (streamIndex >= 0)
                timestamp = ffmpeg.av_rescale_q(timestamp, ffmpeg.av_get_time_base_q(), inCtx.Streams[streamIndex].TimeBase);

            int ret = ffmpeg.avformat_seek_file(inCtx.FmtCtx, streamIndex, long.MinValue, timestamp, long.MaxValue, 0);
            if (ret < 0) { return false; }

            if (streamIndex >= 0)
                ffmpeg.avcodec_flush_buffers(inCtx.Streams[streamIndex].CodecCtx);
            else
                foreach (var stream in inCtx.Streams.Values)
                    ffmpeg.avcodec_flush_buffers(stream.CodecCtx);

            return true;
        }

        #endregion

        /// <summary>
        /// One-time process-wide FFmpeg initialization (devices, network, log level).
        /// Thread-safe via double-checked locking on a volatile flag.
        /// </summary>
        public static void FFmpegLibInit()
        {
            if (!FFmpegInitialization)
            {
                lock (FFmpegLogLock)
                {
                    if (!FFmpegInitialization)
                    {
                        ffmpeg.avdevice_register_all();
                        ffmpeg.avformat_network_init();
                        ffmpeg.av_log_set_flags(ffmpeg.AV_LOG_SKIP_REPEATED);

                        // Verbose logging only while a debugger is attached.
                        int ffmpegLogLevel = Debugger.IsAttached ? ffmpeg.AV_LOG_VERBOSE : ffmpeg.AV_LOG_WARNING;
                        ffmpeg.av_log_set_level(ffmpegLogLevel);

                        // Managed log callback is currently disabled; see OnFFmpegMessageLogged.
                        //FFmpegLogCallback = OnFFmpegMessageLogged;
                        //ffmpeg.av_log_set_callback(FFmpegLogCallback);
                        FFmpegInitialization = true;
                    }
                }
            }
        }

        /// <summary>
        /// Translates an FFmpeg error code into its message text.
        /// </summary>
        /// <param name="error">Negative AVERROR code.</param>
        /// <returns>The error description, or null if av_strerror wrote nothing.</returns>
        public static string GetErrorMessage(int error)
        {
            // stackalloc avoids the previous AllocHGlobal, which leaked if anything
            // threw between allocation and FreeHGlobal.
            const int bufferSize = 1024;
            byte* buffer = stackalloc byte[bufferSize];
            ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
            return Marshal.PtrToStringAnsi((IntPtr)buffer);
        }

        /// <summary>
        /// FFmpeg log callback: formats a native log line and forwards it to
        /// <see cref="DebugLog"/>. Serialized by a lock because av_log_format_line's
        /// prefix state is shared across calls.
        /// </summary>
        /// <param name="p0">Opaque AVClass pointer supplied by FFmpeg.</param>
        /// <param name="level">Log level of this message.</param>
        /// <param name="format">printf-style format string.</param>
        /// <param name="vl">Native va_list with the format arguments.</param>
        private static unsafe void OnFFmpegMessageLogged(void* p0, int level, string format, byte* vl)
        {
            string info = null;
            const int lineSize = 1024;
            lock (FFmpegLogLock)
            {
                int curLevel = ffmpeg.av_log_get_level();
                if (level > curLevel) return;
                byte* lineBuffer = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                info = Marshal.PtrToStringUTF8(new IntPtr(lineBuffer));
            }

            if (!string.IsNullOrWhiteSpace(info) && DebugLog != null)
                DebugLog.Invoke(info);
        }
    }
}