﻿using Microsoft.Graphics.Canvas;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading;
using System.Threading.Tasks;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.Media;
using Windows.Media.Audio;
using Windows.Media.MediaProperties;
using Windows.Storage;
using Windows.Storage.Streams;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;

// The Blank Page item template is documented at https://go.microsoft.com/fwlink/?LinkId=402352&clcid=0x409

namespace uwp_player
{

    /// <summary>
    /// COM interop interface for reaching the raw bytes behind an
    /// <c>IMemoryBufferReference</c> (used below to write PCM data directly
    /// into an <c>AudioBuffer</c> in <c>GenerateAudioData</c>).
    /// The GUID is the system-defined IMemoryBufferByteAccess interface ID and
    /// must not be changed.
    /// </summary>
    [ComImport]
    [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    unsafe interface IMemoryBufferByteAccess
    {
        // buffer: pointer to the first byte of the underlying memory;
        // capacity: usable length of that memory in bytes.
        void GetBuffer(out byte* buffer, out uint capacity);
    }

    /// <summary>
    /// Code-behind for the player page: decodes a raw H.264 elementary stream
    /// ("1.h264") and an AAC stream ("1.aac") via the MyPlayer native decoders,
    /// renders video frames through a Win2D canvas, and feeds decoded PCM into
    /// an AudioGraph frame input node.
    /// </summary>
    public sealed partial class MainPage : Page
    {
        // Latest decoded video frame as a GPU bitmap; created lazily on the
        // first draw after the decoder reports the video dimensions.
        private CanvasBitmap canvasBitmap = null;
        // Initial pixel store (BGRA8, 4 bytes/pixel) used to create canvasBitmap.
        private byte[] colorArray;

        Stream inputVideoFileStream = null;
        Stream inputAudioFileStream = null;

        MyPlayer.AudioDecoder audioDecoder = null;
        MyPlayer.VideoDecoder videoDecoder = null;
        MyPlayer.VideoSize size = new MyPlayer.VideoSize(0, 0);

        // One second of 44.1 kHz, 16-bit stereo PCM (44100 frames * 4 bytes).
        RingBuffer ringBufferAudio = new RingBuffer(44100 * 4 * 1);
        // Scratch buffer for draining the ring buffer; grown on demand.
        byte[] tmpBuffer = new byte[44100 * 4 * 1];

        // Guards ringBufferAudio: written by the audio decoder callback thread,
        // drained by the AudioGraph quantum callback. Dedicated gate object
        // instead of lock(this) so external code cannot interfere with the lock.
        private readonly object audioLock = new object();

        AudioFrameInputNode frameInputNode;
        AudioGraph audioGraph;

        public MainPage()
        {
            this.InitializeComponent();
        }

        private void CanvasImage_Draw(Microsoft.Graphics.Canvas.UI.Xaml.CanvasControl sender, Microsoft.Graphics.Canvas.UI.Xaml.CanvasDrawEventArgs args)
        {
            var ds = args.DrawingSession;
            ds.DrawText("hello", new System.Numerics.Vector2(), new Windows.UI.Color() { A = 23, G = 233 });

            if (canvasBitmap == null && size.GetWidth() > 0 && colorArray != null)
            {
                // Lazily create the bitmap once the decoder has reported the
                // frame size. CreateFromBytes must run against this control's
                // device, which is why creation happens here and not in Page_Loaded.
                canvasBitmap = CanvasBitmap.CreateFromBytes(CanvasImage, colorArray, size.GetWidth(), size.GetHeight(), Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized);
            }

            // Draw on the same pass that created the bitmap as well, so the
            // first frame after creation is not left blank.
            if (canvasBitmap != null)
            {
                ds.DrawImage(canvasBitmap, new Windows.Foundation.Rect(0.0, 0.0, sender.ActualWidth, sender.ActualHeight));
            }
        }

        /// <summary>
        /// Builds the audio pipeline, opens both input streams, opens the
        /// decoders, and starts decoding. async void is acceptable here only
        /// because this is a top-level XAML event handler.
        /// </summary>
        private async void Page_Loaded(object sender, RoutedEventArgs e)
        {
            await InitAudioGraphAsync();

            var videoFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///1.h264"));
            var audioFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///1.aac"));

            inputVideoFileStream = (await videoFile.OpenAsync(FileAccessMode.Read)).AsStreamForRead();
            inputAudioFileStream = (await audioFile.OpenAsync(FileAccessMode.Read)).AsStreamForRead();

            audioDecoder = new MyPlayer.AudioDecoder(ReadAudio, AudioDecoded);
            audioDecoder.Open();

            videoDecoder = new MyPlayer.VideoDecoder(ReadVideo, ImageDecoded);
            size = videoDecoder.Open();

            // Allocate the pixel store BEFORE invalidating: the Draw handler
            // may fire immediately and needs colorArray to create the bitmap.
            colorArray = new byte[size.GetWidth() * size.GetHeight() * 4];
            CanvasImage.Invalidate();

            videoDecoder.BeginDecode();
            audioDecoder.BeginDecode();

            var mediaControls = SystemMediaTransportControls.GetForCurrentView();
            mediaControls.IsPlayEnabled = true;
            mediaControls.IsPauseEnabled = true;
            mediaControls.ButtonPressed += MediaControls_ButtonPressed;
        }

        private void MediaControls_ButtonPressed(SystemMediaTransportControls sender, SystemMediaTransportControlsButtonPressedEventArgs args)
        {
            // TODO: handle Play/Pause presses (both buttons are enabled in Page_Loaded).
        }

        /// <summary>
        /// Creates an AudioGraph configured for 44.1 kHz 16-bit stereo PCM,
        /// wires a frame input node (fed by node_QuantumStarted) to the default
        /// output device, and starts the graph.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Graph or device-output-node creation failed.
        /// </exception>
        async Task InitAudioGraphAsync()
        {
            var aep = AudioEncodingProperties.CreatePcm(44100, 2, 16);
            aep.Subtype = "PCM";
            AudioGraphSettings settings = new AudioGraphSettings(Windows.Media.Render.AudioRenderCategory.Media);
            settings.EncodingProperties = aep;

            CreateAudioGraphResult result = await AudioGraph.CreateAsync(settings);
            if (result.Status != AudioGraphCreationStatus.Success)
            {
                throw new InvalidOperationException($"AudioGraph creation failed: {result.Status}");
            }

            audioGraph = result.Graph;

            frameInputNode = audioGraph.CreateFrameInputNode(aep);

            // The graph raises this whenever the node must supply audio data.
            frameInputNode.QuantumStarted += node_QuantumStarted;

            CreateAudioDeviceOutputNodeResult outputResult = await audioGraph.CreateDeviceOutputNodeAsync();
            if (outputResult.Status != AudioDeviceNodeCreationStatus.Success)
            {
                throw new InvalidOperationException($"Audio device output node creation failed: {outputResult.Status}");
            }

            AudioDeviceOutputNode deviceOutputNode = outputResult.DeviceOutputNode;

            frameInputNode.AddOutgoingConnection(deviceOutputNode);
            audioGraph.Start();
        }

        private void node_QuantumStarted(AudioFrameInputNode sender, FrameInputNodeQuantumStartedEventArgs args)
        {
            // Provide exactly the required number of samples; extra samples
            // would add latency, fewer would cause glitches.
            uint numSamplesNeeded = (uint)args.RequiredSamples;

            if (numSamplesNeeded != 0)
            {
                AudioFrame audioData = GenerateAudioData(numSamplesNeeded);
                frameInputNode.AddFrame(audioData);
            }
        }

        /// <summary>
        /// Builds an AudioFrame of <paramref name="samples"/> stereo 16-bit
        /// sample frames from whatever PCM the decoder has queued in the ring
        /// buffer, padding any shortfall with silence.
        /// </summary>
        unsafe private AudioFrame GenerateAudioData(uint samples)
        {
            // 16-bit stereo PCM: 4 bytes per sample frame.
            int wantedBytes = (int)samples * 4;
            if (wantedBytes > tmpBuffer.Length)
            {
                // Grow the scratch buffer if the graph ever asks for more than
                // one second's worth of audio in a single quantum.
                tmpBuffer = new byte[wantedBytes];
            }

            int cachedBytes;
            lock (audioLock)
            {
                cachedBytes = Math.Min(wantedBytes, ringBufferAudio.GetDataCount());
                ringBufferAudio.ReadBuffer(tmpBuffer, 0, cachedBytes);
            }
            // Pad the shortfall with silence rather than replaying stale bytes.
            Array.Clear(tmpBuffer, cachedBytes, wantedBytes - cachedBytes);

            // Buffer size is (number of samples) * (size of each sample) * channels.
            uint bufferSize = samples * sizeof(short) * 2;
            AudioFrame frame = new Windows.Media.AudioFrame(bufferSize);

            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Write))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                byte* dataInBytes;
                uint capacityInBytes;

                // Get the raw pointer behind the AudioFrame's buffer.
                ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                // Never copy more than either buffer holds (guards against an
                // IndexOutOfRangeException if capacity exceeds the scratch buffer).
                int copyBytes = Math.Min((int)capacityInBytes, tmpBuffer.Length);
                Marshal.Copy(tmpBuffer, 0, (IntPtr)dataInBytes, copyBytes);
            }

            return frame;
        }

        // Pull callback handed to MyPlayer.VideoDecoder.
        byte[] ReadVideo(int wantedLength)
        {
            return Read(inputVideoFileStream, wantedLength);
        }

        // Pull callback handed to MyPlayer.AudioDecoder.
        byte[] ReadAudio(int wantedLength)
        {
            return Read(inputAudioFileStream, wantedLength);
        }

        /// <summary>
        /// Reads up to <paramref name="wantedLength"/> bytes from the stream.
        /// Returns an empty array at end-of-stream, or a shorter array on a
        /// partial read (the decoder is assumed to tolerate short reads —
        /// NOTE(review): confirm against MyPlayer's read-callback contract).
        /// </summary>
        byte[] Read(Stream stream, int wantedLength)
        {
            byte[] t = new byte[wantedLength];
            int count = stream.Read(t, 0, wantedLength);
            if (count == 0)
            {
                return new byte[0];
            }
            if (count == wantedLength)
            {
                return t;
            }
            return t.Take(count).ToArray();
        }

        // Wall-clock time of the first decoded picture; anchor for A/V pacing.
        public DateTimeOffset firstDecodeTime = DateTimeOffset.MinValue;
        int pic_count = 0;
        // Assumed constant frame rate — TODO: read the real rate from the stream.
        int frame_rate = 25;

        /// <summary>
        /// Decoder callback (decoder thread): paces playback to frame_rate by
        /// sleeping until the frame's presentation time, then pushes the pixels
        /// into the canvas bitmap and requests a redraw.
        /// </summary>
        void ImageDecoded(byte[] bytes)
        {
            if (firstDecodeTime == DateTimeOffset.MinValue)
            {
                firstDecodeTime = DateTimeOffset.Now;
            }
            pic_count++;

            // Sleep on the decoder thread (not the UI thread) until this
            // frame's scheduled presentation time.
            DateTimeOffset want_time = firstDecodeTime + TimeSpan.FromSeconds(pic_count * 1.0 / frame_rate);
            DateTimeOffset now = DateTimeOffset.Now;
            if (want_time > now)
            {
                Thread.Sleep(want_time - now);
            }

            if (canvasBitmap != null)
            {
                canvasBitmap.SetPixelBytes(bytes);
                CanvasImage.Invalidate();
            }
        }

        /// <summary>
        /// Decoder callback (decoder thread): appends decoded PCM to the ring
        /// buffer. If the buffer is full, waits briefly for the renderer to
        /// drain it; if still full, drops the oldest bytes to make room.
        /// </summary>
        void AudioDecoded(byte[] bytes, int byteCount)
        {
            if (ringBufferAudio.GetReserveCount() < byteCount)
            {
                // Best-effort backpressure: give the quantum callback a chance
                // to drain the buffer before overwriting anything.
                Thread.Sleep(TimeSpan.FromSeconds(0.1));
            }
            lock (audioLock)
            {
                if (ringBufferAudio.GetReserveCount() < byteCount)
                {
                    // Still full: evict just enough old audio to fit this chunk.
                    ringBufferAudio.Clear(byteCount - ringBufferAudio.GetReserveCount());
                }
                ringBufferAudio.WriteBuffer(bytes, 0, byteCount);
            }
        }
    }
}

