﻿using System;
using System.Net;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Ink;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Shapes;
using System.Windows.Media.DirectShow.Audio;
using System.Mathematics;
using System.Windows.Media.DirectShow.UI;
using System.Linq;

namespace System.Windows.Media.DirectShow.Filters
{
    /// <summary>
    /// Input pin that runs incoming PCM audio through an FFT, hands the
    /// resulting peak values to a <see cref="PeaksVisualization"/> for
    /// rendering, and forwards the unmodified sample to the filter's
    /// output pin.
    /// </summary>
    internal class AudioPeaksVisualizationInputPin : BaseInputPin
    {
        // The only media type this pin accepts: raw PCM audio.
        private AMMediaType[] _mediaTypes = new AMMediaType[]
        {
            new AMMediaType
            {
                MajorType = MediaMajorTypes.Audio,
                SubType = MediaSubTypes.PCM,
            }
        };

        private readonly AudioVisualization _filter;          // owning filter
        private readonly PeaksVisualization _visualization;   // renderer fed with peak values
        private AudioFrame _audioFrame;                       // buffers samples into FFT-sized frames
        private AudioPeaks _audioPeaks;                       // maps FFT results to peak bytes
        private int _audioFrameSize = 16384;                  // default until a connection sizes it

        /// <summary>Creates the pin for the given filter and visualization sink.</summary>
        public AudioPeaksVisualizationInputPin(AudioVisualization filter, PeaksVisualization visualization)
            : base(filter)
        {
            _filter = filter;
            _visualization = visualization;
        }

        /// <summary>This pin's <see cref="Receive"/> never blocks the upstream thread.</summary>
        public override bool ReceiveCanBlock => false;

        /// <summary>
        /// Copies the negotiated sample size and format onto the output pin's
        /// media type and sizes the FFT frame for roughly 12 visualization
        /// updates per second before completing the connection.
        /// </summary>
        protected override void ReceiveConnection(IPin connector, AMMediaType mediaType)
        {
            AMMediaType outputType = _filter.OutputPin.MediaTypes.First();
            outputType.SampleSize = mediaType.SampleSize;
            outputType.Format = mediaType.Format;

            WaveFormatExtensible waveFormat = outputType.Format as WaveFormatExtensible;
            if (waveFormat == null)
            {
                // Not an extensible wave format: leave the connection unestablished.
                // NOTE(review): this silently skips base.ReceiveConnection — presumably
                // intentional rejection; confirm against BaseInputPin's contract.
                return;
            }

            // One frame per ~1/12 s of audio, snapped to a power of two for the FFT.
            _audioFrameSize = (int)FFT.NextPowerOfTwo((uint)(waveFormat.SamplesPerSec * waveFormat.Channels / 12d));

            _audioFrame = new AudioFrame(waveFormat, _audioFrameSize);
            _audioPeaks = new AudioPeaks(waveFormat);

            base.ReceiveConnection(connector, mediaType);
        }

        /// <summary>
        /// Computes the FFT of the incoming sample, renders the resulting peak
        /// values, and forwards the sample downstream. In pull mode the next
        /// sample is requested afterwards.
        /// </summary>
        protected override void Receive(IMediaSample sample)
        {
            sample.AddRef();

            try
            {
                FFTResult[] fftResults = _audioFrame.GetFFTFrame(sample.Data, 0, (int)sample.Length);
                byte[] peakValues = _audioPeaks.GetPeaks(fftResults);

                _visualization.RenderPeakValues(peakValues, sample);

                AudioVisualizationOutputPin outputPin = (AudioVisualizationOutputPin)_filter.OutputPin;
                outputPin.ProcessSample(sample);
            }
            finally
            {
                sample.Release();
            }

            if (IsPullMode)
            {
                RequestNextSample(sample);
            }
        }

        /// <summary>Activates the pin, then its visualization.</summary>
        public override void Active()
        {
            base.Active();
            _visualization.Active();
        }

        /// <summary>Deactivates the pin, then its visualization.</summary>
        public override void Inactive()
        {
            base.Inactive();
            _visualization.Inactive();
        }

        /// <summary>Transitions the pin and its visualization into the running state.</summary>
        public override void Run(TimeSpan start)
        {
            base.Run(start);
            _visualization.Run(start);
        }

        /// <summary>Flushes the output pin and propagates end-of-stream downstream.</summary>
        protected override void EndOfStream()
        {
            base.EndOfStream();

            var outputPin = (AudioVisualizationOutputPin)_filter.OutputPin;
            outputPin.Flush();
            outputPin.DeliverEndOfStream();
        }

        /// <summary>Requests eight buffers, each large enough to hold one FFT frame.</summary>
        protected override AllocatorProperties GetAllocatorRequirements()
        {
            return new AllocatorProperties
            {
                BuffersCount = 8,
                BufferSize = _audioFrameSize,
            };
        }

        /// <summary>Human-readable pin name.</summary>
        public override string Name => "Audio peak visualization input pin";

        /// <summary>Media types this pin can accept (PCM audio only).</summary>
        public override System.Collections.Generic.IEnumerable<AMMediaType> MediaTypes => _mediaTypes;
    }
}
