﻿using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
using FFmpeg.AutoGen;
using FFmpegVideoClip.Configuration;
using static FFmpegVideoClip.Lib.Controls.TimeLineThumbs;
using Size = System.Drawing.Size;

namespace FFmpegVideoClip.Lib
{

    /// <summary>
    /// Opens a media file with FFmpeg, selects the best video stream and decodes it
    /// frame by frame, optionally through a hardware decoding device.
    /// All FFmpeg failures during construction are surfaced via ThrowExceptionIfError.
    /// </summary>
    internal sealed unsafe class VideoStreamDecoder : IDisposable
    {
        public readonly AVCodecContext* _pCodecContext;
        public readonly AVFormatContext* _pFormatContext;
        private readonly AVFrame* _pFrame;          // receives raw decoder output (may be a hardware surface)
        private readonly AVPacket* _pPacket;        // reusable demux packet
        private readonly AVFrame* _receivedFrame;   // CPU-side frame filled by av_hwframe_transfer_data
        public readonly AVStream* VideoStream;
        private readonly int _streamIndex;
        private readonly string FilePath = string.Empty;

        /// <summary>Name of the codec decoding the selected video stream.</summary>
        public string CodecName { get; }
        /// <summary>Coded frame size reported by the codec context.</summary>
        public Size FrameSize { get; }
        /// <summary>Pixel format reported by the codec context (the hardware format when hw decoding).</summary>
        public AVPixelFormat PixelFormat { get; }

        /// <summary>Owning extractor; used only to enrich log messages. Null when opened from a bare path.</summary>
        public FFmpegExtractFrames? Owner { get; }
        public VideoStreamDecoder(FFmpegExtractFrames m_Owner, AVHWDeviceType HWDeviceType) : this(m_Owner.FilePath, HWDeviceType) { Owner = m_Owner; }

        /// <summary>
        /// Opens <paramref name="FileFullPath"/>, locates the best video stream and opens
        /// its decoder, optionally attaching a hardware device context.
        /// </summary>
        /// <param name="FileFullPath">Full path of the media file to open.</param>
        /// <param name="HWDeviceType">Hardware decoder to use; AV_HWDEVICE_TYPE_NONE for software decoding.</param>
        public VideoStreamDecoder(string FileFullPath, AVHWDeviceType HWDeviceType)
        {
            FilePath = FileFullPath;
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame = ffmpeg.av_frame_alloc();
            AVFormatContext* pFormatContext = _pFormatContext;
            ffmpeg.avformat_open_input(&pFormatContext, FilePath, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec* codec = null;
            _streamIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0).ThrowExceptionIfError();
            VideoStream = _pFormatContext->streams[_streamIndex];

            _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);

            ffmpeg.avcodec_parameters_to_context(_pCodecContext, VideoStream->codecpar).ThrowExceptionIfError();

            // Attach the hardware device context BEFORE avcodec_open2 so the codec
            // is opened in hardware mode; NONE means pure software decode.
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();

            ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();

            CodecName = ffmpeg.avcodec_get_name(codec->id);
            FrameSize = new Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat = _pCodecContext->pix_fmt;

            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame = ffmpeg.av_frame_alloc();
        }

        /// <summary>
        /// Decodes the next video frame. Returns false at end of stream.
        /// When hardware decoding is active the frame is transferred to system memory
        /// and the timing/metadata fields are copied over from the hardware frame.
        /// The returned frame's data is owned by this decoder and is only valid until
        /// the next call.
        /// </summary>
        public bool TryDecodeNextFrame(out AVFrame frame)
        {
            ffmpeg.av_frame_unref(_pFrame);
            ffmpeg.av_frame_unref(_receivedFrame);
            int error;
            do
            {
                try
                {
                    // Demux packets until one belonging to the selected video stream is read.
                    do
                    {
                        ffmpeg.av_packet_unref(_pPacket);
                        error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);

                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            frame = *_pFrame;
                            if (Owner != null)
                                LogManage.WriteLineNewInfo($"抽帧失败:{FFmpegHelper.av_strerror(error)}-> TotalFrameCount:[{Owner.CurrentExtractFramesTimes?.Length}]  Index:[{Owner.CurrentExtractFramesIndex}]  Time:[{Owner.CurrentExtractFramesTime}]", true);
                            else
                                LogManage.WriteLineNewInfo($"文件：[{FilePath}]->解码帧失败:{FFmpegHelper.av_strerror(error)}", true);
                            return false;
                        }

                        error.ThrowExceptionIfError();
                    } while (_pPacket->stream_index != _streamIndex);

                    ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
                }
                finally
                {
                    ffmpeg.av_packet_unref(_pPacket);
                }

                error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // decoder needs more packets

            error.ThrowExceptionIfError();

            if (_pCodecContext->hw_device_ctx != null)
            {
                int ret = ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0);
                if (ret == ffmpeg.AVERROR(ffmpeg.EINVAL))
                {
                    // EINVAL: the frame was not actually a hardware surface; use it directly.
                    frame = *_pFrame;
                    frame.time_base = VideoStream->time_base;
                }
                else if (ret == 0)
                {
                    // Transfer copies only pixel data; carry the timing/metadata fields
                    // over from the original decoded frame by hand.
                    frame                       = *_receivedFrame;
                    frame.sample_aspect_ratio   = _pFrame->sample_aspect_ratio;
                    frame.metadata              = _pFrame->metadata;
                    frame.time_base             = VideoStream->time_base;
                    frame.pts                   = _pFrame->pts;
                    frame.best_effort_timestamp = _pFrame->best_effort_timestamp;
                    frame.pkt_dts               = _pFrame->pkt_dts;
                    frame.duration              = _pFrame->duration;
                    frame.flags                 = _pFrame->flags;
                    frame.pict_type             = _pFrame->pict_type;
#pragma warning disable CS0618 // deprecated fields still consumed by downstream code
                    frame.key_frame             = _pFrame->key_frame;
                    frame.pkt_pos               = _pFrame->pkt_pos;
                    frame.pkt_size              = _pFrame->pkt_size;
#pragma warning restore CS0618
                }
                else throw new ApplicationException(FFmpegHelper.av_strerror(ret));
            }
            else
            {
                frame = *_pFrame;
                frame.time_base = VideoStream->time_base;
            }
            return true;
        }
        private static readonly AVRational AV_TIME_BASE_Q = new AVRational() { num = 1, den = ffmpeg.AV_TIME_BASE };

        /// <summary>
        /// Seeks the demuxer to <paramref name="Time"/> and flushes decoder buffers.
        /// Returns true on success, false when av_seek_frame reports an error.
        /// </summary>
        public bool Seek(TimeSpan Time, in int SeekFlags)
        {
            long timestamp = (long)(Time.TotalSeconds * VideoStream->time_base.den / VideoStream->time_base.num);
            int result;
            // BUGFIX: av_seek_frame signals failure with a NEGATIVE return value
            // (>= 0 is success). The previous check (result > 0) treated errors as
            // success and flagged legitimate positive returns as failures.
            if ((result = ffmpeg.av_seek_frame(_pFormatContext, _streamIndex, timestamp, SeekFlags)) < 0)
            {
                LogManage.WriteLineNewInfo($"抽帧Seek失败：{FFmpegHelper.av_strerror(result)}", true);
                return false;
            }
            // Drop any frames buffered from before the seek target.
            ffmpeg.avcodec_flush_buffers(_pCodecContext);
            ffmpeg.av_packet_unref(_pPacket);
            return true;
        }

        // Currently unused helper: rescales a TimeSpan to the stream's time base.
        private long GetTimestamp(TimeSpan Time) => ffmpeg.av_rescale_q((long)(Time.TotalSeconds * ffmpeg.AV_TIME_BASE), AV_TIME_BASE_Q, VideoStream->time_base);

        /// <summary>Copies the container-level metadata dictionary into a managed dictionary.</summary>
        public IReadOnlyDictionary<string, string> GetContextInfo()
        {
            AVDictionaryEntry* tag = null;
            Dictionary<string, string> result = new Dictionary<string, string>();

            while ((tag = ffmpeg.av_dict_get(_pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
            {
                string key = Marshal.PtrToStringAnsi((IntPtr)tag->key) ?? string.Empty;
                string value = Marshal.PtrToStringAnsi((IntPtr)tag->value) ?? string.Empty;
                result[key] = value;
            }

            return result;
        }


        private bool disposedValue = false;
        private void m_Dispose()
        {
            // Free native resources in reverse allocation order. Runs at most once,
            // from either Dispose or the finalizer (guarded by disposedValue).
            if (!disposedValue)
            {
                AVFrame* pFrame = _pFrame;
                ffmpeg.av_frame_free(&pFrame);
                pFrame = _receivedFrame;
                ffmpeg.av_frame_free(&pFrame);

                AVPacket* pPacket = _pPacket;

                ffmpeg.av_packet_free(&pPacket);

                AVCodecContext* pCodecContext = _pCodecContext;
                ffmpeg.avcodec_free_context(&pCodecContext);

                AVFormatContext* pFormatContext = _pFormatContext;
                ffmpeg.avformat_close_input(&pFormatContext);

                disposedValue = true;
            }
        }
        ~VideoStreamDecoder() { m_Dispose(); }
        public void Dispose() { m_Dispose(); GC.SuppressFinalize(this); }
    }

    /// <summary>
    /// Converts decoded frames to a fixed destination size / pixel format via swscale.
    /// The destination buffer is allocated once and REUSED: the AVFrame returned by
    /// <see cref="Convert"/> points into that shared buffer and is only valid until
    /// the next Convert call or until this instance is disposed.
    /// </summary>
    internal sealed unsafe class VideoFrameConverter : IDisposable
    {
        private readonly nint _convertedFrameBufferPtr;   // unmanaged output pixel buffer
        private readonly Size _destinationSize;
        private readonly byte_ptrArray4 _dstData;         // plane pointers into the output buffer
        private readonly int_array4 _dstLinesize;         // per-plane strides
        private SwsContext* _pConvertContext;             // created lazily on first Convert
        //private readonly DpiScale Dpi;

        private readonly Size sourceSize;
        private readonly AVPixelFormat destinationPixelFormat;

        /// <summary>
        /// Allocates the destination buffer for <paramref name="destinationSize"/> /
        /// <paramref name="destinationPixelFormat"/>. The swscale context itself is
        /// created lazily, once the source pixel format is known from the first frame.
        /// </summary>
        public VideoFrameConverter(Size sourceSize, Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            //Dpi = VisualTreeHelper.GetDpi(Application.Current.MainWindow);

            _destinationSize = destinationSize;

            this.sourceSize = sourceSize;
            this.destinationPixelFormat = destinationPixelFormat;

            // BUGFIX: av_image_get_buffer_size returns a negative AVERROR for an
            // invalid size/format; previously that value was passed straight to
            // AllocHGlobal. Validate it with the project's standard error helper.
            int convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1).ThrowExceptionIfError();
            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte*)_convertedFrameBufferPtr, destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
        }

        /// <summary>
        /// Creates the swscale context for <paramref name="sourcePixelFormat"/>.
        /// No-op once created: the context is cached from the FIRST frame's format
        /// and is not rebuilt if the stream's pixel format changes mid-stream.
        /// </summary>
        public void InitConvertContext(AVPixelFormat sourcePixelFormat)
        {
            if (_pConvertContext != null)
                return;

            _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat, _destinationSize.Width, _destinationSize.Height, destinationPixelFormat, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
                throw new ApplicationException($"创建[{typeof(SwsContext)}]失败，SWidth:[{sourceSize.Width}]-SHeight:[{sourceSize.Height}]-SPixelFormat:[{sourcePixelFormat}]-DWidth:[{_destinationSize.Width}]-DHeight:[{_destinationSize.Height}]-DPixelFormat:[{destinationPixelFormat}]-SWS_FAST_BILINEAR:[{nameof(ffmpeg.SWS_FAST_BILINEAR)}]");
        }

        /// <summary>
        /// Scales/converts <paramref name="sourceFrame"/> into the shared destination
        /// buffer and returns a frame header that preserves the source's timing and
        /// metadata fields. The pixel data belongs to this converter, not the caller.
        /// </summary>
        public AVFrame Convert(AVFrame sourceFrame)
        {
            InitConvertContext((AVPixelFormat)sourceFrame.format);
            ffmpeg.sws_scale(_pConvertContext, sourceFrame.data, sourceFrame.linesize, 0, sourceFrame.height, _dstData, _dstLinesize);
            byte_ptrArray8 data = default;
            int_array8 linesize = default;

            data.UpdateFrom(_dstData);
            linesize.UpdateFrom(_dstLinesize);

            AVFrame result = new AVFrame
            {
                sample_aspect_ratio   = sourceFrame.sample_aspect_ratio,
                data                  = data,
                linesize              = linesize,
                width                 = _destinationSize.Width,
                height                = _destinationSize.Height,
                metadata              = sourceFrame.metadata,
                time_base             = sourceFrame.time_base,
                pts                   = sourceFrame.pts,
                best_effort_timestamp = sourceFrame.best_effort_timestamp,
                pkt_dts               = sourceFrame.pkt_dts,
                duration              = sourceFrame.duration,
                flags                 = sourceFrame.flags,
                pict_type             = sourceFrame.pict_type,
#pragma warning disable CS0618 // deprecated fields still consumed by downstream code
                key_frame = sourceFrame.key_frame,
                pkt_pos   = sourceFrame.pkt_pos,
                pkt_size  = sourceFrame.pkt_size
#pragma warning restore CS0618
            };
            return result;
        }

        private bool disposedValue = false;
        private void m_Dispose()
        {
            // Runs at most once from Dispose or the finalizer (guarded by disposedValue).
            if (!disposedValue)
            {
                if (_convertedFrameBufferPtr != nint.Zero)
                    Marshal.FreeHGlobal(_convertedFrameBufferPtr);
                if (_pConvertContext != null)
                    ffmpeg.sws_freeContext(_pConvertContext);
                disposedValue = true;
            }
        }
        ~VideoFrameConverter() { m_Dispose(); }
        public void Dispose() { m_Dispose(); GC.SuppressFinalize(this); }
    }


    /// <summary>
    /// Request descriptor for a frame-time lookup: the target time and whether the
    /// lookup should resolve key-frame (I-frame) boundaries instead of frame boundaries.
    /// </summary>
    internal struct GetFrameTimeInfo
    {
        /// <summary>Requested time point within the video.</summary>
        public readonly TimeSpan Time;
        /// <summary>True to look up key-frame boundaries; false for ordinary frame boundaries.</summary>
        public readonly bool IsKeyFrame;

        public GetFrameTimeInfo(TimeSpan time, bool isKeyFrame = false)
        {
            Time = time;
            IsKeyFrame = isKeyFrame;
        }
    }
    /// <summary>
    /// Result of a frame-time lookup: the requested time plus the resolved frame
    /// boundaries (start of the frame, its last millisecond, and the next frame's start).
    /// </summary>
    internal struct FrameTimeInfo
    {
        /// <summary>The time that was originally requested.</summary>
        public TimeSpan GetTime;
        /// <summary>Start time of the frame containing the requested time.</summary>
        public TimeSpan Start;
        /// <summary>End time of that frame (just before the next frame begins).</summary>
        public TimeSpan End;
        /// <summary>Start time of the following frame.</summary>
        public TimeSpan NextStart;

        /// <summary>True only when all three boundary fields were resolved (strictly positive).</summary>
        public bool IsHasValue
        {
            get
            {
                bool hasStart = Start > TimeSpan.Zero;
                bool hasEnd = End > TimeSpan.Zero;
                bool hasNext = NextStart > TimeSpan.Zero;
                return hasStart && hasEnd && hasNext;
            }
        }
    }
    /// <summary>
    /// Resolves frame (or key-frame) time boundaries for a set of requested time
    /// points by seeking and decoding with a <see cref="VideoStreamDecoder"/>.
    /// </summary>
    internal sealed class GetFrameData : IDisposable
    {
        private AVHWDeviceType HWDevice;        // hardware decoder type taken from config
        private VideoStreamDecoder vsd;         // decoder owned (and disposed) by this instance
        private bool disposedValue;
        // Converts a frame's pts (expressed in its own time_base) to a TimeSpan.
        private static TimeSpan GetTime(ref AVFrame frame) => TimeSpan.FromSeconds(frame.pts * ffmpeg.av_q2d(frame.time_base));

        /// <summary>Opens <paramref name="FilePath"/> with the configured thumbnail hardware decoder.</summary>
        public GetFrameData(string FilePath)
        {
            HWDevice = Config.ThumbnailHardwareDecoding;
            vsd = new VideoStreamDecoder(FilePath, HWDevice);
        }
        /// <summary>
        /// Resolves one <see cref="FrameTimeInfo"/> per request. Entries whose lookup
        /// throws are logged and left with only GetTime populated (IsHasValue == false).
        /// </summary>
        public unsafe FrameTimeInfo[] GetFrameTime(in ReadOnlySpan<GetFrameTimeInfo> Times)
        {
            FrameTimeInfo[] FrameTimes = new FrameTimeInfo[Times.Length];
            int Index = 0;

            foreach (ref readonly GetFrameTimeInfo GetTimeInfo in Times)
            {
                ref FrameTimeInfo CurrentFrameTimeInfo = ref FrameTimes[Index++];
                CurrentFrameTimeInfo.GetTime = GetTimeInfo.Time;
                try
                {
                    if (GetTimeInfo.IsKeyFrame)
                        GetKeyFrameTime(vsd, GetTimeInfo.Time, ref CurrentFrameTimeInfo);
                    else
                        GetFrameTime(vsd, GetTimeInfo.Time, ref CurrentFrameTimeInfo);
                }
                catch (Exception exc)
                {
                    LogManage.WriteExceptionInfo($"请求帧数据异常,请求时间:{GetTimeInfo.Time.ToString(Tools.TimeSpan_hhmmssfff)}", exc);
                }
            }
            return FrameTimes;
        }

        // Seeks backward to (at or before) `time`, then decodes forward until a frame
        // past `time` is found; fills Start/End/NextStart of the containing frame.
        // Leaves CurrentFrameTimeInfo untouched if the seek fails or EOF is hit first.
        private static unsafe void GetFrameTime(VideoStreamDecoder vsd, TimeSpan time, ref FrameTimeInfo CurrentFrameTimeInfo)
        {
            TimeSpan LastFramesTime = TimeSpan.Zero;
            TimeSpan CurrentFramesTime = TimeSpan.Zero;

            if (vsd.Seek(time, ffmpeg.AVSEEK_FLAG_BACKWARD))
            {
                while (vsd.TryDecodeNextFrame(out AVFrame frame))
                {
                    CurrentFramesTime = GetTime(ref frame);

                    // If the current frame's time has passed the requested time, the
                    // current frame marks the start of the NEXT frame after the one
                    // containing the requested time.
                    if (CurrentFramesTime > time)
                    {
                        CurrentFrameTimeInfo.Start = LastFramesTime;
                        CurrentFrameTimeInfo.End = CurrentFramesTime - Tools.OneMillisecond;
                        CurrentFrameTimeInfo.NextStart = CurrentFramesTime;
                        break;
                    }
                    LastFramesTime = CurrentFramesTime;
                }
            }
        }

        // Key-frame variant: Start = time of the first frame decoded after a backward
        // seek; NextStart = time of the next I-frame encountered.
        // NOTE(review): End is never assigned here, so IsHasValue stays false for
        // key-frame lookups — confirm whether callers rely on Start/NextStart only.
        // NOTE(review): if the initial Seek fails, the while loop below still decodes
        // from the current position — verify this fallback is intentional.
        private static unsafe void GetKeyFrameTime(VideoStreamDecoder vsd, TimeSpan time, ref FrameTimeInfo CurrentFrameTimeInfo)
        {
            TimeSpan CurrentFramesTime = TimeSpan.Zero;
            if (vsd.Seek(time, ffmpeg.AVSEEK_FLAG_BACKWARD) && vsd.TryDecodeNextFrame(out AVFrame frame))
            {
                CurrentFramesTime = GetTime(ref frame);
                CurrentFrameTimeInfo.Start = CurrentFramesTime;
            }
            while (vsd.TryDecodeNextFrame(out frame))
            {
                // Skip everything that is not an intra (key) frame.
                if (frame.pict_type != AVPictureType.AV_PICTURE_TYPE_I)
                    continue;
                CurrentFramesTime = GetTime(ref frame);
                CurrentFrameTimeInfo.NextStart = CurrentFramesTime;
                return;
            }
        }

        // NOTE(review): the finalizer disposes the managed decoder; vsd may already
        // have been finalized by then — confirm the finalizer path is needed at all.
        private void OnDispose()
        {
            if (!disposedValue)
            {
                vsd.Dispose();
                vsd = null!;
                disposedValue = true;
            }
        }
        ~GetFrameData() { OnDispose(); }
        public void Dispose() { OnDispose(); GC.SuppressFinalize(this); }

        /// <summary>Convenience wrapper: opens the file, resolves all times, then disposes.</summary>
        public static FrameTimeInfo[] GetFramesTimes(string FilePath, in ReadOnlySpan<GetFrameTimeInfo> Times)
        {
            using (GetFrameData gfd = new(FilePath))
            {
                return gfd.GetFrameTime(Times);
            }
        }
    }



    /// <summary>
    /// Decodes a video file and extracts one frame per requested time point,
    /// converting each to RGB24 and drawing it into the supplied
    /// <see cref="WriteableBitmapCollection"/>. Extractions are serialized on an
    /// internal lock; disposal runs in the background and waits for a running
    /// extraction to finish.
    /// </summary>
    internal sealed class FFmpegExtractFrames : IDisposable
    {
        // CPU-side pixel format produced for each hardware decoder type.
        // Currently unused (see the commented-out sourcePixelFormat line in the
        // constructor); kept for when the hardware path needs it again.
        private static AVPixelFormat GetHWPixelFormat(AVHWDeviceType hWDevice) => hWDevice switch
        {
            AVHWDeviceType.AV_HWDEVICE_TYPE_NONE => AVPixelFormat.AV_PIX_FMT_NONE,
            AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU => AVPixelFormat.AV_PIX_FMT_VDPAU,
            AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA => AVPixelFormat.AV_PIX_FMT_CUDA,
            AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI => AVPixelFormat.AV_PIX_FMT_VAAPI,
            AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2 => AVPixelFormat.AV_PIX_FMT_NV12,
            AVHWDeviceType.AV_HWDEVICE_TYPE_QSV => AVPixelFormat.AV_PIX_FMT_QSV,
            AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX => AVPixelFormat.AV_PIX_FMT_VIDEOTOOLBOX,
            AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA => AVPixelFormat.AV_PIX_FMT_NV12,
            AVHWDeviceType.AV_HWDEVICE_TYPE_DRM => AVPixelFormat.AV_PIX_FMT_DRM_PRIME,
            AVHWDeviceType.AV_HWDEVICE_TYPE_OPENCL => AVPixelFormat.AV_PIX_FMT_OPENCL,
            AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC => AVPixelFormat.AV_PIX_FMT_MEDIACODEC,
            AVHWDeviceType.AV_HWDEVICE_TYPE_D3D12VA => AVPixelFormat.AV_PIX_FMT_NV12,
            _ => AVPixelFormat.AV_PIX_FMT_NONE
        };
        // Converts a frame's pts (in its own time_base) to a TimeSpan.
        private static TimeSpan GetTime(ref AVFrame frame) => TimeSpan.FromSeconds(frame.pts * ffmpeg.av_q2d(frame.time_base));

        // Serializes extractions. BUGFIX: made readonly and never nulled on dispose —
        // the previous code set it to null, so an ExtractFrames call racing Dispose
        // could throw on lock(null); such calls now exit via the disposedValue check.
        private readonly object SyncRoot = new object();
        private AVHWDeviceType HWDevice;
        private AVPixelFormat destinationPixelFormat;
        private Size sourceSize;
        private Size destinationSize;
        private VideoStreamDecoder vsd;
        private VideoFrameConverter vfc;

        private bool disposedValue;
        /// <summary>Times of the extraction currently in progress; null when idle.</summary>
        public TimeSpan[]? CurrentExtractFramesTimes { get; private set; }
        /// <summary>Time point currently being extracted, or TimeSpan.Zero when idle.</summary>
        public TimeSpan CurrentExtractFramesTime
        {
            get
            {
                // Snapshot both properties once: the decode loop clears them from
                // another thread, so check-then-use on the live properties could race.
                TimeSpan[]? times = CurrentExtractFramesTimes;
                int? index = CurrentExtractFramesIndex;
                return index != null && times != null && index.Value < times.Length ? times[index.Value] : TimeSpan.Zero;
            }
        }
        /// <summary>Index into <see cref="CurrentExtractFramesTimes"/>; null when idle.</summary>
        public int? CurrentExtractFramesIndex { get; private set; }

        /// <summary>Path of the media file being extracted from.</summary>
        public string FilePath { get; private set; }

        /// <summary>True while an extraction loop is executing.</summary>
        public bool IsRuning { get; private set; } = false;


        /// <summary>
        /// Opens <paramref name="url"/> with the configured thumbnail hardware decoder
        /// and prepares an RGB24 converter sized to <paramref name="DestinationRect"/>.
        /// </summary>
        public unsafe FFmpegExtractFrames(string url, Int32Rect DestinationRect)
        {
            FilePath = url;
            HWDevice = Config.ThumbnailHardwareDecoding;
            destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_RGB24;
            vsd = new VideoStreamDecoder(this, HWDevice);
            sourceSize = vsd.FrameSize;
            //sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
            destinationSize = new(DestinationRect.Width, DestinationRect.Height);
            vfc = new VideoFrameConverter(sourceSize, destinationSize, destinationPixelFormat);
        }


        // Core extraction loop: for each requested time, seek backward then decode
        // forward to the first frame at or after that time, convert it and write it
        // into the corresponding bitmap. Cancellation is checked between native calls.
        private unsafe void InternalExtractFrames(TimeSpan[] Times, WriteableBitmapCollection bitmaps, ThumbCancelSource CancellSource = null!)
        {
            if (Volatile.Read(ref disposedValue) || vsd == null)
                return;
            if (CancellSource == null)
                CancellSource = ThumbCancelSource.Create();
            IsRuning = true;
            CurrentExtractFramesTimes = Times;
            TimeSpan CurrentFramesTime;
            CurrentExtractFramesIndex = 0;
            try
            {
                foreach (TimeSpan time in Times)
                {
                    if (CancellSource.IsCancellationRequested) return;
                    if (vsd.Seek(time, ffmpeg.AVSEEK_FLAG_BACKWARD))
                    {
                        if (CancellSource.IsCancellationRequested) return;

                        while (vsd.TryDecodeNextFrame(out AVFrame frame))
                        {
                            if (CancellSource.IsCancellationRequested) return;
                            CurrentFramesTime = GetTime(ref frame);
                            // Skip frames decoded before the requested time (the
                            // backward seek lands on the preceding key frame).
                            if (CurrentFramesTime < time)
                                continue;
                            AVFrame convertedFrame = vfc!.Convert(frame);
                            DrawTools.TimeWriteToWriteableBitmap(bitmaps[CurrentExtractFramesIndex.Value], convertedFrame, bitmaps.SizeRect, convertedFrame.linesize[0]);
                            LogManage.WriteLineNewInfo($"请求时间:{time.ToString(Tools.TimeSpan_hhmmssfff)},获得时间:{CurrentFramesTime.ToString(Tools.TimeSpan_hhmmssfff)}");
                            break;
                        }
                    }
                    CurrentExtractFramesIndex++;
                }
            }
            catch (Exception ex) { LogManage.WriteExceptionInfo("FFmpeg解码错误:", ex, true); }
            finally { IsRuning = false; CurrentExtractFramesTimes = null; CurrentExtractFramesIndex = null; }

        }
        /// <summary>Runs one extraction; concurrent calls are serialized on SyncRoot.</summary>
        public void ExtractFrames(TimeSpan[] Times, WriteableBitmapCollection bitmaps, ThumbCancelSource CancellSource = null!)
        {
            lock (SyncRoot)
            {
                InternalExtractFrames(Times, bitmaps, CancellSource);
            }
        }

        /// <summary>Completes once the current extraction loop (if any) has finished.</summary>
        public Task WaitExtractFramesCompleteAsync() => Task.Run(async () => { while (IsRuning) await Task.Delay(100).ConfigureAwait(false); });

        private void OnDispose()
        {
            // CompareExchange returns the PREVIOUS value, so only the first caller
            // (background dispose task or finalizer) performs the teardown.
            if (!Interlocked.CompareExchange(ref disposedValue, true, false))
            {
                if (IsRuning)
                    WaitExtractFramesCompleteAsync().ConfigureAwait(false).GetAwaiter().GetResult();
                if (vfc != null)
                {
                    vfc.Dispose();
                    vfc = null!;
                }
                if (vsd != null)
                {
                    vsd.Dispose();
                    vsd = null!;
                }
            }
        }
        ~FFmpegExtractFrames() { OnDispose(); }
        /// <summary>
        /// Disposes on the thread pool so the caller is not blocked waiting for a
        /// running extraction. BUGFIX: GC.SuppressFinalize is now called synchronously;
        /// it used to run inside the task, leaving a window where the finalizer could
        /// run OnDispose concurrently with the background dispose.
        /// </summary>
        public void Dispose()
        {
            GC.SuppressFinalize(this);
            Task.Run(OnDispose);
        }

    }

    /*
        internal sealed unsafe class DrawText : IDisposable
        {
            private AVFilterGraph* filterGraph;
            private AVFilterInOut* outputs;
            private AVFilterInOut* inputs;
            private AVFrame* pFilteredFrame;
            AVFilterContext* srcFilterCtx;
            AVFilterContext* sinkFilterCtx;
            private bool disposedValue;
            private int ret;
            private bool IsInit = false;

            public DrawText(AVStream* VideoStream, AVCodecContext* _pCodecContext, Size _destinationSize)
            {
                AVFilterGraph* filterGraph = ffmpeg.avfilter_graph_alloc();

                string args = $"video_size={_destinationSize.Width}x{_destinationSize.Height}:pix_fmt={(int)AVPixelFormat.AV_PIX_FMT_RGB24
                    }:time_base={VideoStream->time_base.num}/{VideoStream->time_base.den}:pixel_aspect={_pCodecContext->sample_aspect_ratio.num}/{_pCodecContext->sample_aspect_ratio.den}";

                AVFilterContext* srcFilterCtx;
                AVFilter* srcFilter = ffmpeg.avfilter_get_by_name("buffer");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&srcFilterCtx, srcFilter, "out_buffer", args, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                AVFilterContext* sinkFilterCtx;
                AVFilter* sinkFilter = ffmpeg.avfilter_get_by_name("buffersink");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&sinkFilterCtx, sinkFilter, "in_buffersink", string.Empty, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }


                AVFilterInOut* inputs = ffmpeg.avfilter_inout_alloc();
                inputs->name = ffmpeg.av_strdup("out");
                inputs->filter_ctx = srcFilterCtx;
                inputs->pad_idx = 0;
                //outputs->next = null;

                AVFilterInOut* outputs = ffmpeg.avfilter_inout_alloc();
                outputs->name = ffmpeg.av_strdup("in");
                outputs->filter_ctx = sinkFilterCtx;
                outputs->pad_idx = 0;
                //inputs->next = null;

                if ((ret = ffmpeg.avfilter_graph_parse_ptr(filterGraph, "drawtext=text='%{pts\\:hms}':x=1:y=1:fontsize=12:fontcolor=white:bordercolor=black:borderw=1", &inputs, &outputs, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Could not parse filter graph]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                if ((ret = ffmpeg.avfilter_graph_config(filterGraph, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Could not configure filter graph]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                this.filterGraph = filterGraph;
                this.inputs = inputs;
                this.outputs = outputs;
                this.srcFilterCtx = srcFilterCtx;
                this.sinkFilterCtx = sinkFilterCtx;
                pFilteredFrame = ffmpeg.av_frame_alloc();
                IsInit = true;
            }


            public bool TryDrawTimeText(ref AVFrame frame)
            {
                if (!IsInit) return false;
                AVFrame inFrame = frame;
                ffmpeg.av_frame_unref(pFilteredFrame);
                if ((ret = ffmpeg.av_buffersrc_add_frame(srcFilterCtx, &inFrame)) < 0)
                {
                    LogManage.WriteLineNewInfo($"错误:[Error adding frame to buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                if ((ret = ffmpeg.av_buffersink_get_frame(sinkFilterCtx, pFilteredFrame)) == 0)
                {
                    frame = *pFilteredFrame;
                    return true;
                }
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                    return false;

                LogManage.WriteLineNewInfo($"错误:[Error getting filtered frame from buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                return false;
            }

            private void OnDispose()
            {
                if (!disposedValue)
                {

                    var filterGraph = this.filterGraph;
                    var outputs = this.outputs;
                    var inputs = this.inputs;
                    var pFilteredFrame = this.pFilteredFrame;

                    ffmpeg.avfilter_inout_free(&outputs);
                    ffmpeg.avfilter_inout_free(&inputs);

                    ffmpeg.avfilter_free(srcFilterCtx);
                    ffmpeg.avfilter_free(sinkFilterCtx);

                    ffmpeg.avfilter_graph_free(&filterGraph);

                    ffmpeg.av_frame_unref(pFilteredFrame);
                    ffmpeg.av_frame_free(&pFilteredFrame);

                    disposedValue = true;
                }
            }
            ~DrawText() { OnDispose(); }
            public void Dispose() { OnDispose(); GC.SuppressFinalize(this); }
        }

        internal unsafe class DrawText2 : IDisposable
        {
            private AVFilterGraph* filterGraph;
            private AVFilterContext* srcFilterCtx;
            private AVFilterContext* sinkFilterCtx;
            private AVFilterContext* drawtextContext;
            private AVFrame* pFilteredFrame;
            private bool IsInit = false;
            private bool disposedValue;
            private int ret;
            public DrawText2(AVStream* VideoStream, AVCodecContext* _pCodecContext, Size _destinationSize)
            {
                // 创建过滤器图
                AVFilterGraph* filterGraph = ffmpeg.avfilter_graph_alloc();

                string args = $"video_size={_destinationSize.Width}x{_destinationSize.Height}:pix_fmt={(int)AVPixelFormat.AV_PIX_FMT_RGB24}:time_base={VideoStream->time_base.num}/{VideoStream->time_base.den}:pixel_aspect={_pCodecContext->sample_aspect_ratio.num}/{_pCodecContext->sample_aspect_ratio.den}";

                AVFilterContext* srcFilterCtx;
                AVFilter* srcFilter = ffmpeg.avfilter_get_by_name("buffer");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&srcFilterCtx, srcFilter, "in", args, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                AVFilterContext* sinkFilterCtx;
                AVFilter* sinkFilter = ffmpeg.avfilter_get_by_name("buffersink");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&sinkFilterCtx, sinkFilter, "out", string.Empty, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }


                // 添加drawtext过滤器到过滤器图中
                AVFilter* drawtextFilter = ffmpeg.avfilter_get_by_name("drawtext");
                AVFilterContext* drawtextContext = ffmpeg.avfilter_graph_alloc_filter(filterGraph, drawtextFilter, "drawtext");
                ffmpeg.av_opt_set(drawtextContext, "text", "%{pts\\:hms}", 0);
                //ffmpeg.av_opt_set(drawtextContext, "fontfile", "arial.ttf", 0);
                ffmpeg.av_opt_set(drawtextContext, "fontsize", "12", 0);
                ffmpeg.av_opt_set(drawtextContext, "fontcolor", "white", 0);
                ffmpeg.av_opt_set(drawtextContext, "x", "1", 0);
                ffmpeg.av_opt_set(drawtextContext, "y", "1", 0);
                ffmpeg.av_opt_set(drawtextContext, "bordercolor", "black", 0);
                ffmpeg.av_opt_set(drawtextContext, "borderw", "1", 0);

                // 配置过滤器图
                //ffmpeg.av_buffersink_get_frame()
                ffmpeg.avfilter_link(srcFilterCtx, 0, drawtextContext, 0);
                ffmpeg.avfilter_link(drawtextContext, 0, sinkFilterCtx, 0);
                ffmpeg.avfilter_graph_config(filterGraph, null);


                this.filterGraph = filterGraph;
                this.srcFilterCtx = srcFilterCtx;
                this.sinkFilterCtx = sinkFilterCtx;
                this.drawtextContext = drawtextContext;
                pFilteredFrame = ffmpeg.av_frame_alloc();
                IsInit = true;


                // 解码视频帧并绘制时间码
                //AVPacket packet = new AVPacket();
                //AVFrame* frame = ffmpeg.av_frame_alloc();
                //while (ffmpeg.av_read_frame(formatContext, &packet) >= 0)
                //{
                //    if (packet.stream_index == videoStreamIndex)
                //    {
                //        ffmpeg.avcodec_send_packet(codecContext, &packet);
                //        while (ffmpeg.avcodec_receive_frame(codecContext, frame) == 0)
                //        {
                //            ffmpeg.av_buffersrc_add_frame_flags(ffmpeg.av_buffersrc_get_by_name("in"), frame, ffmpeg.AV_BUFFERSRC_FLAG_KEEP_REF);
                //            AVFrame* filteredFrame = ffmpeg.av_frame_alloc();
                //            while (ffmpeg.av_buffersink_get_frame(ffmpeg.av_buffersink_get_by_name("out"), filteredFrame) == 0)
                //            {
                //                在filteredFrame上绘制时间码
                //                省略代码

                //                ffmpeg.av_frame_unref(filteredFrame);
                //            }
                //            ffmpeg.av_frame_free(&filteredFrame);
                //        }
                //    }
                //    ffmpeg.av_packet_unref(&packet);
                //}
                // 释放资源，省略代码

            }

            public bool TryDrawTimeText(ref AVFrame frame)
            {
                if (!IsInit) return false;
                AVFrame inFrame = frame;
                ffmpeg.av_frame_unref(pFilteredFrame);

                if ((ret = ffmpeg.av_buffersrc_add_frame(srcFilterCtx, &inFrame)) < 0)
                {
                    LogManage.WriteLineNewInfo($"错误:[Error adding frame to buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                if ((ret = ffmpeg.av_buffersink_get_frame(sinkFilterCtx, pFilteredFrame)) == 0)
                {
                    frame = *pFilteredFrame;
                    return true;
                }
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                    return false;

                LogManage.WriteLineNewInfo($"错误:[Error getting filtered frame from buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                return false;
            }


            private void OnDispose()
            {
                if (!disposedValue)
                {

                    var filterGraph = this.filterGraph;
                    //var outputs = this.outputs;
                    //var inputs = this.inputs;
                    var pFilteredFrame = this.pFilteredFrame;

                    //ffmpeg.avfilter_inout_free(&outputs);
                    //ffmpeg.avfilter_inout_free(&inputs);

                    ffmpeg.avfilter_free(srcFilterCtx);
                    ffmpeg.avfilter_free(sinkFilterCtx);
                    ffmpeg.avfilter_free(drawtextContext);


                    ffmpeg.avfilter_graph_free(&filterGraph);

                    ffmpeg.av_frame_unref(pFilteredFrame);
                    ffmpeg.av_frame_free(&pFilteredFrame);

                    disposedValue = true;
                }
            }
            ~DrawText2() { OnDispose(); }
            public void Dispose() { OnDispose(); GC.SuppressFinalize(this); }
        }

        internal unsafe class DrawText3
        {
            private static int ret;

            public static bool DrawText3Text(AVStream* VideoStream, AVCodecContext* _pCodecContext, Size _destinationSize,AVFrame* inFrame,out AVFrame ResultFrame) 
            {
                ResultFrame = default;
                string InArgs = $"video_size={_pCodecContext->width}x{_pCodecContext->height
                    }:pix_fmt={(int)_pCodecContext->pix_fmt
                    }:time_base={VideoStream->time_base.num}/{VideoStream->time_base.den
                    //}:frame_rate:{VideoStream->time_base.num}/{VideoStream->time_base.den
                    }:pixel_aspect={_pCodecContext->sample_aspect_ratio.num}/{_pCodecContext->sample_aspect_ratio.den}";
                //string OutArgs = $"pix_fmt={(int)AVPixelFormat.AV_PIX_FMT_RGB24}";

                AVFilterGraph* filter_graph = ffmpeg.avfilter_graph_alloc();

                AVFilterContext* buffersrc_ctx;
                AVFilter* buffersrc = ffmpeg.avfilter_get_by_name("buffer");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in", InArgs, null, filter_graph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_create_filter in]:[{FFmpegHelper.av_strerror(ret)}]->[{InArgs}]", true);
                    return false;
                }

                AVFilterContext* buffersink_ctx;
                AVFilter* buffersink = ffmpeg.avfilter_get_by_name("buffersink");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out", null, null, filter_graph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_create_filter out]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }

                //AVPixelFormat[] pix_fmts = { AVPixelFormat.AV_PIX_FMT_YUV420P, AVPixelFormat.AV_PIX_FMT_YUV420P, AVPixelFormat.AV_PIX_FMT_YUYV422, AVPixelFormat.AV_PIX_FMT_NONE };
                //ret = ffmpeg.av_opt_set_int_list(buffersink_ctx, "pix_fmts", pix_fmts,
                //                  AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN);

                AVFilterInOut* outputs = ffmpeg.avfilter_inout_alloc();
                outputs->name = ffmpeg.av_strdup("in");
                outputs->filter_ctx = buffersrc_ctx;
                outputs->pad_idx = 0;
                outputs->next = null;

                AVFilterInOut* inputs = ffmpeg.avfilter_inout_alloc();
                inputs->name = ffmpeg.av_strdup("out");
                inputs->filter_ctx = buffersink_ctx;
                inputs->pad_idx = 0;
                inputs->next = null;

                if ((ret = ffmpeg.avfilter_graph_parse_ptr(filter_graph, "drawtext=text='%{pts\\:hms}':x=1:y=1:fontsize=12:fontcolor=white:bordercolor=black:borderw=1", &inputs, &outputs, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_parse_ptr args]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }

                if ((ret = ffmpeg.avfilter_graph_config(filter_graph, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_config]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }


                   //3.1.1 配置滤镜图输入端和输出端
                    //buffer 滤镜和 buffersink 滤镜是两个特殊的视频滤镜，分别用于视频滤镜链的输入端和输出端。
                    //与之相似，abuffer 滤镜和 abuffersink 滤镜是两个特殊的音频滤镜，分别用于音频滤镜链的输入端和输出端。
                    //一个滤镜图可能由多个滤镜链构成，每个滤镜链的输入节点就是 buffer 滤镜，输出节点是 buffersink 滤镜，因此一个滤镜图可能有多个 buffer 滤镜，也可能有多个 buffersink 滤镜。
                    //应用程序通过访问 buffer 滤镜和 buffersink 滤镜实现和滤镜图的数据交互。


                AVFrame* ResultFramePtr = ffmpeg.av_frame_alloc();
                ffmpeg.av_frame_unref(ResultFramePtr);
                if ((ret = ffmpeg.av_buffersrc_add_frame(buffersrc_ctx, inFrame)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[av_buffersrc_add_frame]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                ret = ffmpeg.av_buffersink_get_frame(buffersink_ctx, ResultFramePtr);
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                {

                }
                if (ret < 0)
                {
                    LogManage.WriteLineNewInfo($"[av_buffersink_get_frame]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                return true;
            }
        }
    */

    /*
        internal sealed unsafe class DrawText : IDisposable
        {
            private AVFilterGraph* filterGraph;
            private AVFilterInOut* outputs;
            private AVFilterInOut* inputs;
            private AVFrame* pFilteredFrame;
            AVFilterContext* srcFilterCtx;
            AVFilterContext* sinkFilterCtx;
            private bool disposedValue;
            private int ret;
            private bool IsInit = false;

            public DrawText(AVStream* VideoStream, AVCodecContext* _pCodecContext, Size _destinationSize)
            {
                AVFilterGraph* filterGraph = ffmpeg.avfilter_graph_alloc();

                string args = $"video_size={_destinationSize.Width}x{_destinationSize.Height}:pix_fmt={(int)AVPixelFormat.AV_PIX_FMT_RGB24
                    }:time_base={VideoStream->time_base.num}/{VideoStream->time_base.den}:pixel_aspect={_pCodecContext->sample_aspect_ratio.num}/{_pCodecContext->sample_aspect_ratio.den}";

                AVFilterContext* srcFilterCtx;
                AVFilter* srcFilter = ffmpeg.avfilter_get_by_name("buffer");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&srcFilterCtx, srcFilter, "out_buffer", args, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                AVFilterContext* sinkFilterCtx;
                AVFilter* sinkFilter = ffmpeg.avfilter_get_by_name("buffersink");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&sinkFilterCtx, sinkFilter, "in_buffersink", string.Empty, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }


                AVFilterInOut* inputs = ffmpeg.avfilter_inout_alloc();
                inputs->name = ffmpeg.av_strdup("out");
                inputs->filter_ctx = srcFilterCtx;
                inputs->pad_idx = 0;
                //outputs->next = null;

                AVFilterInOut* outputs = ffmpeg.avfilter_inout_alloc();
                outputs->name = ffmpeg.av_strdup("in");
                outputs->filter_ctx = sinkFilterCtx;
                outputs->pad_idx = 0;
                //inputs->next = null;

                if ((ret = ffmpeg.avfilter_graph_parse_ptr(filterGraph, "drawtext=text='%{pts\\:hms}':x=1:y=1:fontsize=12:fontcolor=white:bordercolor=black:borderw=1", &inputs, &outputs, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Could not parse filter graph]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                if ((ret = ffmpeg.avfilter_graph_config(filterGraph, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Could not configure filter graph]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                this.filterGraph = filterGraph;
                this.inputs = inputs;
                this.outputs = outputs;
                this.srcFilterCtx = srcFilterCtx;
                this.sinkFilterCtx = sinkFilterCtx;
                pFilteredFrame = ffmpeg.av_frame_alloc();
                IsInit = true;
            }


            public bool TryDrawTimeText(ref AVFrame frame)
            {
                if (!IsInit) return false;
                AVFrame inFrame = frame;
                ffmpeg.av_frame_unref(pFilteredFrame);
                if ((ret = ffmpeg.av_buffersrc_add_frame(srcFilterCtx, &inFrame)) < 0)
                {
                    LogManage.WriteLineNewInfo($"错误:[Error adding frame to buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                if ((ret = ffmpeg.av_buffersink_get_frame(sinkFilterCtx, pFilteredFrame)) == 0)
                {
                    frame = *pFilteredFrame;
                    return true;
                }
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                    return false;

                LogManage.WriteLineNewInfo($"错误:[Error getting filtered frame from buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                return false;
            }

            private void OnDispose()
            {
                if (!disposedValue)
                {

                    var filterGraph = this.filterGraph;
                    var outputs = this.outputs;
                    var inputs = this.inputs;
                    var pFilteredFrame = this.pFilteredFrame;

                    ffmpeg.avfilter_inout_free(&outputs);
                    ffmpeg.avfilter_inout_free(&inputs);

                    ffmpeg.avfilter_free(srcFilterCtx);
                    ffmpeg.avfilter_free(sinkFilterCtx);

                    ffmpeg.avfilter_graph_free(&filterGraph);

                    ffmpeg.av_frame_unref(pFilteredFrame);
                    ffmpeg.av_frame_free(&pFilteredFrame);

                    disposedValue = true;
                }
            }
            ~DrawText() { OnDispose(); }
            public void Dispose() { OnDispose(); GC.SuppressFinalize(this); }
        }

        internal unsafe class DrawText2 : IDisposable
        {
            private AVFilterGraph* filterGraph;
            private AVFilterContext* srcFilterCtx;
            private AVFilterContext* sinkFilterCtx;
            private AVFilterContext* drawtextContext;
            private AVFrame* pFilteredFrame;
            private bool IsInit = false;
            private bool disposedValue;
            private int ret;
            public DrawText2(AVStream* VideoStream, AVCodecContext* _pCodecContext, Size _destinationSize)
            {
                // 创建过滤器图
                AVFilterGraph* filterGraph = ffmpeg.avfilter_graph_alloc();

                string args = $"video_size={_destinationSize.Width}x{_destinationSize.Height}:pix_fmt={(int)AVPixelFormat.AV_PIX_FMT_RGB24}:time_base={VideoStream->time_base.num}/{VideoStream->time_base.den}:pixel_aspect={_pCodecContext->sample_aspect_ratio.num}/{_pCodecContext->sample_aspect_ratio.den}";

                AVFilterContext* srcFilterCtx;
                AVFilter* srcFilter = ffmpeg.avfilter_get_by_name("buffer");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&srcFilterCtx, srcFilter, "in", args, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }

                AVFilterContext* sinkFilterCtx;
                AVFilter* sinkFilter = ffmpeg.avfilter_get_by_name("buffersink");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&sinkFilterCtx, sinkFilter, "out", string.Empty, null, filterGraph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[Error creating buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return;
                }


                // 添加drawtext过滤器到过滤器图中
                AVFilter* drawtextFilter = ffmpeg.avfilter_get_by_name("drawtext");
                AVFilterContext* drawtextContext = ffmpeg.avfilter_graph_alloc_filter(filterGraph, drawtextFilter, "drawtext");
                ffmpeg.av_opt_set(drawtextContext, "text", "%{pts\\:hms}", 0);
                //ffmpeg.av_opt_set(drawtextContext, "fontfile", "arial.ttf", 0);
                ffmpeg.av_opt_set(drawtextContext, "fontsize", "12", 0);
                ffmpeg.av_opt_set(drawtextContext, "fontcolor", "white", 0);
                ffmpeg.av_opt_set(drawtextContext, "x", "1", 0);
                ffmpeg.av_opt_set(drawtextContext, "y", "1", 0);
                ffmpeg.av_opt_set(drawtextContext, "bordercolor", "black", 0);
                ffmpeg.av_opt_set(drawtextContext, "borderw", "1", 0);

                // 配置过滤器图
                //ffmpeg.av_buffersink_get_frame()
                ffmpeg.avfilter_link(srcFilterCtx, 0, drawtextContext, 0);
                ffmpeg.avfilter_link(drawtextContext, 0, sinkFilterCtx, 0);
                ffmpeg.avfilter_graph_config(filterGraph, null);


                this.filterGraph = filterGraph;
                this.srcFilterCtx = srcFilterCtx;
                this.sinkFilterCtx = sinkFilterCtx;
                this.drawtextContext = drawtextContext;
                pFilteredFrame = ffmpeg.av_frame_alloc();
                IsInit = true;


                // 解码视频帧并绘制时间码
                //AVPacket packet = new AVPacket();
                //AVFrame* frame = ffmpeg.av_frame_alloc();
                //while (ffmpeg.av_read_frame(formatContext, &packet) >= 0)
                //{
                //    if (packet.stream_index == videoStreamIndex)
                //    {
                //        ffmpeg.avcodec_send_packet(codecContext, &packet);
                //        while (ffmpeg.avcodec_receive_frame(codecContext, frame) == 0)
                //        {
                //            ffmpeg.av_buffersrc_add_frame_flags(ffmpeg.av_buffersrc_get_by_name("in"), frame, ffmpeg.AV_BUFFERSRC_FLAG_KEEP_REF);
                //            AVFrame* filteredFrame = ffmpeg.av_frame_alloc();
                //            while (ffmpeg.av_buffersink_get_frame(ffmpeg.av_buffersink_get_by_name("out"), filteredFrame) == 0)
                //            {
                //                在filteredFrame上绘制时间码
                //                省略代码

                //                ffmpeg.av_frame_unref(filteredFrame);
                //            }
                //            ffmpeg.av_frame_free(&filteredFrame);
                //        }
                //    }
                //    ffmpeg.av_packet_unref(&packet);
                //}
                // 释放资源，省略代码

            }

            public bool TryDrawTimeText(ref AVFrame frame)
            {
                if (!IsInit) return false;
                AVFrame inFrame = frame;
                ffmpeg.av_frame_unref(pFilteredFrame);

                if ((ret = ffmpeg.av_buffersrc_add_frame(srcFilterCtx, &inFrame)) < 0)
                {
                    LogManage.WriteLineNewInfo($"错误:[Error adding frame to buffer source]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                if ((ret = ffmpeg.av_buffersink_get_frame(sinkFilterCtx, pFilteredFrame)) == 0)
                {
                    frame = *pFilteredFrame;
                    return true;
                }
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                    return false;

                LogManage.WriteLineNewInfo($"错误:[Error getting filtered frame from buffer sink]:[{FFmpegHelper.av_strerror(ret)}]", true);
                return false;
            }


            private void OnDispose()
            {
                if (!disposedValue)
                {

                    var filterGraph = this.filterGraph;
                    //var outputs = this.outputs;
                    //var inputs = this.inputs;
                    var pFilteredFrame = this.pFilteredFrame;

                    //ffmpeg.avfilter_inout_free(&outputs);
                    //ffmpeg.avfilter_inout_free(&inputs);

                    ffmpeg.avfilter_free(srcFilterCtx);
                    ffmpeg.avfilter_free(sinkFilterCtx);
                    ffmpeg.avfilter_free(drawtextContext);


                    ffmpeg.avfilter_graph_free(&filterGraph);

                    ffmpeg.av_frame_unref(pFilteredFrame);
                    ffmpeg.av_frame_free(&pFilteredFrame);

                    disposedValue = true;
                }
            }
            ~DrawText2() { OnDispose(); }
            public void Dispose() { OnDispose(); GC.SuppressFinalize(this); }
        }

        internal unsafe class DrawText3
        {
            private static int ret;

            public static bool DrawText3Text(AVStream* VideoStream, AVCodecContext* _pCodecContext, Size _destinationSize,AVFrame* inFrame,out AVFrame ResultFrame) 
            {
                ResultFrame = default;
                string InArgs = $"video_size={_pCodecContext->width}x{_pCodecContext->height
                    }:pix_fmt={(int)_pCodecContext->pix_fmt
                    }:time_base={VideoStream->time_base.num}/{VideoStream->time_base.den
                    //}:frame_rate:{VideoStream->time_base.num}/{VideoStream->time_base.den
                    }:pixel_aspect={_pCodecContext->sample_aspect_ratio.num}/{_pCodecContext->sample_aspect_ratio.den}";
                //string OutArgs = $"pix_fmt={(int)AVPixelFormat.AV_PIX_FMT_RGB24}";

                AVFilterGraph* filter_graph = ffmpeg.avfilter_graph_alloc();

                AVFilterContext* buffersrc_ctx;
                AVFilter* buffersrc = ffmpeg.avfilter_get_by_name("buffer");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&buffersrc_ctx, buffersrc, "in", InArgs, null, filter_graph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_create_filter in]:[{FFmpegHelper.av_strerror(ret)}]->[{InArgs}]", true);
                    return false;
                }

                AVFilterContext* buffersink_ctx;
                AVFilter* buffersink = ffmpeg.avfilter_get_by_name("buffersink");
                if ((ret = ffmpeg.avfilter_graph_create_filter(&buffersink_ctx, buffersink, "out", null, null, filter_graph)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_create_filter out]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }

                //AVPixelFormat[] pix_fmts = { AVPixelFormat.AV_PIX_FMT_YUV420P, AVPixelFormat.AV_PIX_FMT_YUV420P, AVPixelFormat.AV_PIX_FMT_YUYV422, AVPixelFormat.AV_PIX_FMT_NONE };
                //ret = ffmpeg.av_opt_set_int_list(buffersink_ctx, "pix_fmts", pix_fmts,
                //                  AV_PIX_FMT_NONE, AV_OPT_SEARCH_CHILDREN);

                AVFilterInOut* outputs = ffmpeg.avfilter_inout_alloc();
                outputs->name = ffmpeg.av_strdup("in");
                outputs->filter_ctx = buffersrc_ctx;
                outputs->pad_idx = 0;
                outputs->next = null;

                AVFilterInOut* inputs = ffmpeg.avfilter_inout_alloc();
                inputs->name = ffmpeg.av_strdup("out");
                inputs->filter_ctx = buffersink_ctx;
                inputs->pad_idx = 0;
                inputs->next = null;

                if ((ret = ffmpeg.avfilter_graph_parse_ptr(filter_graph, "drawtext=text='%{pts\\:hms}':x=1:y=1:fontsize=12:fontcolor=white:bordercolor=black:borderw=1", &inputs, &outputs, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_parse_ptr args]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }

                if ((ret = ffmpeg.avfilter_graph_config(filter_graph, null)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[avfilter_graph_config]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }


                   //3.1.1 配置滤镜图输入端和输出端
                    //buffer 滤镜和 buffersink 滤镜是两个特殊的视频滤镜，分别用于视频滤镜链的输入端和输出端。
                    //与之相似，abuffer 滤镜和 abuffersink 滤镜是两个特殊的音频滤镜，分别用于音频滤镜链的输入端和输出端。
                    //一个滤镜图可能由多个滤镜链构成，每个滤镜链的输入节点就是 buffer 滤镜，输出节点是 buffersink 滤镜，因此一个滤镜图可能有多个 buffer 滤镜，也可能有多个 buffersink 滤镜。
                    //应用程序通过访问 buffer 滤镜和 buffersink 滤镜实现和滤镜图的数据交互。


                AVFrame* ResultFramePtr = ffmpeg.av_frame_alloc();
                ffmpeg.av_frame_unref(ResultFramePtr);
                if ((ret = ffmpeg.av_buffersrc_add_frame(buffersrc_ctx, inFrame)) < 0)
                {
                    LogManage.WriteLineNewInfo($"[av_buffersrc_add_frame]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                ret = ffmpeg.av_buffersink_get_frame(buffersink_ctx, ResultFramePtr);
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                {

                }
                if (ret < 0)
                {
                    LogManage.WriteLineNewInfo($"[av_buffersink_get_frame]:[{FFmpegHelper.av_strerror(ret)}]", true);
                    return false;
                }
                return true;
            }
        }
    */

}



