﻿//#define DEBUG_DUMP_FRAGMENTS

using Microsoft.Media.FragmentedMediaSink;
using Microsoft.Media.IngestClient;
using Microsoft.Media.ISO;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading;
using System.Threading.Tasks;
using Windows.Foundation;
using Windows.Media.Capture;
using Windows.Media.Devices;
using Windows.Media.MediaProperties;
using Windows.Storage.Streams;
using Windows.Web.Http;

namespace Microsoft.Media.CaptureClient
{
    /// <summary>
    /// A class capable of capturing data from the webcam and sending it to AMS live ingest service.
    /// </summary>
    public sealed class CaptureSession : IDisposable
    {
        LiveClient liveClient;
        Uri ingestionUrl;
        string streamId;
        MediaCapture mediaCapture;
        FMediaSinkProxy mediaSink;
        IDictionary<IMediaEncodingProperties, FragmentCreatedEventArgs> firstFragments;
        IList<IFragmentController> fragmentControllers;
        ulong timeStampOffset;
        DateTime recordingStartTime;
        bool isRecording = false;
        TimeSpan recordingTime;
        // Private gate for synchronizing the MediaCapture.Failed handler. Locking on a
        // dedicated private object (rather than the publicly reachable MediaCapture
        // instance passed as 'sender') avoids deadlocks with external code that might
        // lock the same object.
        readonly object failedSync = new object();

        /// <summary>
        /// Gets or sets the fragment size. Default is 2 seconds. Should be between 2 and 6 seconds.
        /// </summary>
        public TimeSpan FragmentDuration { get; set; }

        /// <summary>
        /// Gets or sets whether to use wall clock time. If true, times are offset by 1/1/1970 UTC. Default is true.
        /// </summary>
        public bool UseWallClock { get; set; }

        /// <summary>
        /// Gets or sets output timescale to use. Default is 100 nanoseconds (10,000,000).
        /// </summary>
        public uint OutputTimeScale { get; set; }

        /// <summary>
        /// Raised when the internet connection fails and the client attempts a retry. Event args allow retry to be cancelled and include error that necessitated retry.
        /// </summary>
        public event EventHandler<PushRetryEventArgs> PushRetry;

        /// <summary>
        /// Raised when the internet connection has failed and the client is done retrying. Event args include error that caused failure.
        /// </summary>
        public event EventHandler<PushFailureEventArgs> PushFailed;

        /// <summary>
        /// Raised when a new fragment is created.
        /// </summary>
        public event EventHandler<FragmentCreatedEventArgs> FragmentCreated;

        /// <summary>
        /// Raised when a new fragment has been transmuxed and is queued to send to the server.
        /// </summary>
        public event EventHandler<FragmentDeliveryEventArgs> FragmentQueued;

        /// <summary>
        /// Raised when a fragment has been successfully sent to the server.
        /// </summary>
        public event EventHandler<FragmentDeliveryEventArgs> FragmentSent;

        /// <summary>
        /// Raised when a non-critical capture error has occurred. Critical errors are surfaced through the MediaCapture.Failed event.
        /// </summary>
        public event EventHandler<CaptureErrorEventArgs> CaptureError;

        /// <summary>
        /// Creates a new instance of CaptureSession.
        /// </summary>
        public CaptureSession()
        {
            OutputTimeScale = 10000000;
            AudioOutputEncodingProperties = new List<AudioEncodingProperties>();
            VideoOutputEncodingProperties = new List<VideoEncodingProperties>();

            UseWallClock = true;
            FragmentDuration = TimeSpan.FromSeconds(2);
            // initialize a new live client object to push the stream to the server
            liveClient = new LiveClient();
            liveClient.FragmentSent += liveClient_FragmentSent;
            liveClient.FragmentQueued += liveClient_FragmentQueued;
            liveClient.PushFailed += liveClient_PushFailed;
            liveClient.PushRetry += liveClient_PushRetry;
        }

        /// <summary>
        /// Gets or sets the maximum number of retries in the event of an internet failure.
        /// </summary>
        public int? MaxRetries
        {
            get { return liveClient.MaxRetries; }
            set { liveClient.MaxRetries = value; }
        }

        /// <summary>
        /// Gets or sets the amount of time to wait between retries in the event of an internet failure.
        /// </summary>
        public TimeSpan? RetryDelay
        {
            get { return liveClient.RetryDelay; }
            set { liveClient.RetryDelay = value; }
        }

        /// <summary>
        /// Gets the current amount of time that the session has been recording.
        /// While recording this is computed live from the start time; once stopped it
        /// returns the frozen duration captured at stop/failure time.
        /// </summary>
        public TimeSpan RecordingTime
        {
            get
            {
                if (isRecording)
                {
                    return DateTime.UtcNow.Subtract(recordingStartTime);
                }
                else
                {
                    return recordingTime;
                }
            }
            private set { recordingTime = value; }
        }

        /// <summary>
        /// Gets the size of the in-memory buffer (data that has been captured and is ready to send but is currently in the queue).
        /// </summary>
        public ulong BufferSize
        {
            get
            {
                return liveClient.BytesPushed - liveClient.BytesSent;
            }
        }

        /// <summary>
        /// Gets the last Http response status code returned.
        /// </summary>
        public HttpStatusCode HttpResponseStatusCode { get { return liveClient.HttpResponseStatusCode; } }

        /// <summary>
        /// Gets whether the session is actively recording.
        /// </summary>
        public bool IsRecording { get { return isRecording; } }

        /// <summary>
        /// Gets or sets the encoding profile to instruct the webcam to use. Note: both audio and video must be uncompressed.
        /// </summary>
        public MediaEncodingProfile CaptureEncodingProfile { get; set; }

        /// <summary>
        /// Gets or sets the list of audio tracks to construct and send to the server. Note: this must be AAC audio.
        /// </summary>
        public IList<AudioEncodingProperties> AudioOutputEncodingProperties { get; set; }

        /// <summary>
        /// Gets or sets the list of video tracks to construct and send to the server. Note: this must be H264 video.
        /// </summary>
        public IList<VideoEncodingProperties> VideoOutputEncodingProperties { get; set; }

        // Forwards non-critical sink errors to subscribers of CaptureError.
        void mediaSink_MediaSinkError(object sender, String error)
        {
            // Copy to a local so the last subscriber detaching between the null check
            // and the invocation cannot cause a NullReferenceException.
            var handler = CaptureError;
            if (handler != null) handler(this, new CaptureErrorEventArgs(error));
        }

        /// <summary>
        /// Start capturing and sending data.
        /// </summary>
        /// <param name="mediaCapture">An initialized MediaCapture object used to capture audio and/or video.</param>
        /// <param name="outputStream">A custom stream to dump to instead of sending to the server. Useful for debugging.</param>
        /// <returns>an awaitable async action that indicates the session has successfully started and is ready to send captured media to the server.</returns>
        public IAsyncAction StartAsync(MediaCapture mediaCapture, IOutputStream outputStream)
        {
            // initialize the live client with a stream
            Func<CancellationToken, Task> initializeAction = (c) => Task.Run(() => liveClient.Initialize(new StreamOutputConsumer(outputStream, true)));
            return AsyncInfo.Run(c => StartAsync(mediaCapture, initializeAction, c));
        }

        /// <summary>
        /// Start capturing and sending data.
        /// </summary>
        /// <param name="mediaCapture">An initialized MediaCapture object used to capture audio and/or video.</param>
        /// <param name="ingestionUrl">The AMS live ingest endpoint to stream to.</param>
        /// <param name="streamId">The id of the stream. This can be anything.</param>
        /// <returns>an awaitable async action that indicates the session has successfully started and is ready to send captured media to the server.</returns>
        public IAsyncAction StartAsync(MediaCapture mediaCapture, Uri ingestionUrl, string streamId)
        {
            this.ingestionUrl = ingestionUrl;
            this.streamId = streamId;
            // initialize the live client (note: this will test the internet connection)
            Func<CancellationToken, Task> initializeAction = (c) => liveClient.InitializeAsync(ingestionUrl, streamId).AsTask(c);
            return AsyncInfo.Run(c => StartAsync(mediaCapture, initializeAction, c));
        }

        // Shared start path: wires up the media sink and fragment controllers, runs the
        // supplied live-client initializer, then begins recording to the custom sink.
        // On any failure the partially built state is torn down via Clear() and the
        // exception is rethrown to the caller.
        async Task StartAsync(MediaCapture mediaCapture, Func<CancellationToken, Task> initializeAction, CancellationToken c)
        {
            this.mediaCapture = mediaCapture;
            this.mediaCapture.Failed += mediaCapture_Failed;
            RecordingTime = TimeSpan.Zero;

            // create the custom mediasink
            mediaSink = new FMediaSinkProxy();
            mediaSink.MediaSinkError += mediaSink_MediaSinkError;

            firstFragments = new Dictionary<IMediaEncodingProperties, FragmentCreatedEventArgs>();
            var audioFragmentControllers = new List<IFragmentController>();
            var videoFragmentControllers = new List<IFragmentController>();

            foreach (var videoTrackProperties in VideoOutputEncodingProperties)
            {
                var videoFragmentController = new FragmentController() { EncodingProperties = videoTrackProperties, FragmentSize = FragmentDuration };
                videoFragmentControllers.Add(videoFragmentController);
                ((FragmentController)videoFragmentController).FragmentCreated += fragmentController_FragmentCreated;
            }
            foreach (var audioTrackProperties in AudioOutputEncodingProperties)
            {
                var audioFragmentController = new FragmentController() { EncodingProperties = audioTrackProperties, FragmentSize = FragmentDuration };
                audioFragmentControllers.Add(audioFragmentController);
                ((FragmentController)audioFragmentController).FragmentCreated += fragmentController_FragmentCreated;
            }
            // video controllers first, then audio: their position in this list defines the stream index
            fragmentControllers = videoFragmentControllers.Concat(audioFragmentControllers).ToList();

            try
            {
                // initialize the live client
                await initializeAction(c);
                // start capturing to the custom media sink --> which will start sending data to the fragmentControllers
                var mfExtension = await mediaSink.InitializeAsync(CaptureEncodingProfile.Audio, CaptureEncodingProfile.Video, audioFragmentControllers, videoFragmentControllers).AsTask(c);
                await mediaCapture.StartRecordToCustomSinkAsync(CaptureEncodingProfile, mfExtension).AsTask(c);

                recordingStartTime = DateTime.UtcNow;
                isRecording = true;
                if (UseWallClock)
                {
                    var epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
                    this.timeStampOffset = (ulong)(recordingStartTime.Subtract(epoch).Ticks); // add this value to all fragment timestamps
                }
                else
                {
                    this.timeStampOffset = 0;
                }
            }
            catch
            {
                Clear();
                throw;
            }
        }

        // Called (possibly on a background thread) each time a fragment controller finishes
        // a fragment. The very first fragment of every track is held back until all tracks
        // have produced one, so the header (ftyp/uuid/moov) can be built from the complete
        // track set before any media data is pushed.
        void fragmentController_FragmentCreated(object sender, Fragment args)
        {
            var fragmentController = (FragmentController)sender;
            var streamIndex = fragmentControllers.IndexOf(fragmentController); // stream index comes from input stream index. With multiple tracks, we need to adjust.
            var e = FragmentCreatedEventArgs.Load(args, fragmentController.EncodingProperties, (uint)streamIndex);

            // local copy avoids a race with the last subscriber detaching
            var createdHandler = FragmentCreated;
            if (createdHandler != null) createdHandler(this, e);

            if (e.FragmentIndex == 0)
            {
                int fragmentCount;
                lock (firstFragments) // prevent race condition
                {
                    firstFragments.Add(fragmentController.EncodingProperties, e);
                    fragmentCount = firstFragments.Count;
                }
                if (fragmentCount == fragmentControllers.Count)
                {
                    var tracks = new List<IEncodedTrack>();
                    foreach (var frag in firstFragments.OrderBy(f => f.Value.StreamIndex)) // make sure order is consistent with stream index
                    {
                        if (frag.Key is VideoEncodingProperties)
                            tracks.Add(new EncodedVideoTrack((VideoEncodingProperties)frag.Key, frag.Value.Boxes));
                        else if (frag.Key is AudioEncodingProperties)
                            tracks.Add(new EncodedAudioTrack((AudioEncodingProperties)frag.Key, frag.Value.Boxes));
                    }
                    var piffHeader = PiffTransmuxer.CreateHeader(tracks, OutputTimeScale);

                    // start by sending the required 3 boxes (ftyp, uuid, & moov)
                    liveClient.Start(piffHeader);
                    // order by stream index here as well; dictionary enumeration order is
                    // undefined and the buffered first fragments should be pushed in a
                    // deterministic order consistent with the header above.
                    foreach (var frag in firstFragments.OrderBy(f => f.Value.StreamIndex))
                    {
                        var timestamp = frag.Value.StartTime;
                        var duration = frag.Value.EndTime - frag.Value.StartTime;
                        var piffFragment = PiffTransmuxer.CreateFragment(frag.Value.Boxes, frag.Value.StreamIndex + 1, (uint)(frag.Value.StreamIndex + 1 + frag.Value.FragmentIndex * fragmentControllers.Count), timestamp + timeStampOffset, duration, OutputTimeScale);

                        liveClient.PushFragment(piffFragment);
                    }
                    firstFragments = null;
                }
            }
            else
            {
                var timestamp = e.StartTime;
                var duration = e.EndTime - e.StartTime;

                var piffFragment = PiffTransmuxer.CreateFragment(e.Boxes, e.StreamIndex + 1, (uint)(e.StreamIndex + 1 + e.FragmentIndex * fragmentControllers.Count), timestamp + timeStampOffset, duration, OutputTimeScale);
                liveClient.PushFragment(piffFragment);
            }
        }

        /// <summary>
        /// Indicates that the MediaCapture session is complete and should be stopped.
        /// </summary>
        /// <returns>An awaitable async action that returns when the stream has finished sending all buffered data to the server.</returns>
        public IAsyncAction StopAsync()
        {
            return AsyncInfo.Run(c => StopAsync(c));
        }

        // Stops recording, sends the mfra footer, and waits for the live client to drain.
        async Task StopAsync(CancellationToken c)
        {
            await EndAsync();
            await Task.Run(() =>
            {
                var piffFooter = PiffTransmuxer.CreateFooter();
                liveClient.Complete(piffFooter);
            });
            await liveClient.FinalizeAsync().AsTask(c);
        }

        void mediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs)
        {
            lock (failedSync) // prevents race condition since this event can fire on background thread more than once.
            {
                if (isRecording)
                {
                    isRecording = false;
                    RecordingTime = DateTime.UtcNow.Subtract(recordingStartTime);
                    Clear();
                }
            }
        }

        // Stops the MediaCapture recording (if active), freezes RecordingTime, and releases
        // per-session state.
        async Task EndAsync()
        {
            if (isRecording)
            {
                await mediaCapture.StopRecordAsync();
                isRecording = false;
                RecordingTime = DateTime.UtcNow.Subtract(recordingStartTime);
                Clear();
            }
        }

        // Releases all per-session state so the instance can be started again.
        // Safe to call from a failed/partial start.
        void Clear()
        {
            ingestionUrl = null;
            streamId = null;
            if (mediaCapture != null)
            {
                mediaCapture.Failed -= mediaCapture_Failed;
                mediaCapture = null;
            }
            if (mediaSink != null)
            {
                // detach before disposing so a late error callback cannot fire into a disposed session
                mediaSink.MediaSinkError -= mediaSink_MediaSinkError;
                mediaSink.Dispose();
                mediaSink = null;
            }
            firstFragments = null;
            fragmentControllers = null;
        }

        void liveClient_FragmentSent(object sender, FragmentDeliveryEventArgs e)
        {
            var handler = FragmentSent;
            if (handler != null) handler(this, e);
        }

        void liveClient_FragmentQueued(object sender, FragmentDeliveryEventArgs e)
        {
            var handler = FragmentQueued;
            if (handler != null) handler(this, e);
        }

        void liveClient_PushRetry(object sender, PushRetryEventArgs e)
        {
            var handler = PushRetry;
            if (handler != null) handler(this, e);
        }

        // async void is acceptable here because this is a top-level event handler.
        async void liveClient_PushFailed(object sender, PushFailureEventArgs e)
        {
            await EndAsync(); // quit the record session
            await liveClient.FinalizeAsync(); // cleanup liveClient.

            var handler = PushFailed;
            if (handler != null) handler(this, e);
        }

        /// <inheritdoc />
        public void Dispose()
        {
            // Guard against double-dispose: the previous implementation dereferenced
            // liveClient unconditionally and threw on a second call. Also detach ALL
            // handlers that the constructor attached (FragmentSent/FragmentQueued were
            // previously leaked).
            if (liveClient != null)
            {
                liveClient.FragmentSent -= liveClient_FragmentSent;
                liveClient.FragmentQueued -= liveClient_FragmentQueued;
                liveClient.PushFailed -= liveClient_PushFailed;
                liveClient.PushRetry -= liveClient_PushRetry;
                liveClient.Dispose();
                liveClient = null;
            }
        }
    }

    /// <summary>
    /// Event args used to provide information about a fragment that has been created.
    /// </summary>
    public sealed class FragmentCreatedEventArgs
    {
        // Number of 100ns ticks per second; used to convert the moov duration
        // (expressed in the movie's own timescale) into ticks.
        const ulong TicksPerSecond = 10000000;

        FragmentCreatedEventArgs(IList<IBox> boxes, uint streamIndex, uint fragmentIndex, ulong startTime, ulong endTime, IMediaEncodingProperties encodingProperties)
        {
            Boxes = boxes;
            StreamIndex = streamIndex;
            FragmentIndex = fragmentIndex;
            StartTime = startTime;
            EndTime = endTime;
            EncodingProperties = encodingProperties;
        }

        // Parses the fragment's MP4 box stream and builds the event args.
        // Throws InvalidOperationException when the stream does not contain exactly
        // the expected 4 boxes.
        internal static FragmentCreatedEventArgs Load(Fragment fragment, IMediaEncodingProperties encodingProperties, uint streamIndex)
        {
            var boxes = MP4BoxReader.ReadAllBoxes(fragment.Stream).ToList();
            // use a specific exception type rather than bare System.Exception
            if (boxes.Count != 4) throw new InvalidOperationException("Invalid Fragment Stream");

            // IMPORTANT: modify the fragment EndTime based on the actual mp4 duration. This can be slightly different from the expected duration and cause drift over time.
            var moov = boxes.OfType<MovieBox>().First();
            var duration = moov.MovieHeader.Duration * TicksPerSecond / moov.MovieHeader.TimeScale;
            fragment.EndTime = fragment.StartTime + duration;

            var result = new FragmentCreatedEventArgs(boxes, streamIndex, fragment.FragmentIndex, fragment.StartTime, fragment.EndTime, encodingProperties);
#if DEBUG_DUMP_FRAGMENTS
            var nowait = SaveMP4(result);
#endif
            return result;
        }

#if DEBUG_DUMP_FRAGMENTS
        // Debug helper: dumps the fragment boxes to {fragmentIndex}-{streamIndex}.mp4
        // in the app's local "output" folder. Fire-and-forget from Load.
        static async Task SaveMP4(FragmentCreatedEventArgs e)
        {
            var folder = await Windows.Storage.ApplicationData.Current.LocalFolder.CreateFolderAsync("output", Windows.Storage.CreationCollisionOption.OpenIfExists);
            var file = await folder.CreateFileAsync(string.Format("{0}-{1}.mp4", e.FragmentIndex, e.StreamIndex), Windows.Storage.CreationCollisionOption.ReplaceExisting);
            using (var stream = await file.OpenAsync(Windows.Storage.FileAccessMode.ReadWrite))
            {
                MP4BoxWriter.WriteAllBoxes(e.Boxes, stream);
                await stream.FlushAsync();
            }
            System.Diagnostics.Debug.WriteLine(file.Path);
        }
#endif

        /// <summary>
        /// Gets the list of ISO boxes in the fragment.
        /// </summary>
        public IList<IBox> Boxes { get; private set; }

        /// <summary>
        /// Gets the zero based stream index of the fragment.
        /// </summary>
        public uint StreamIndex { get; private set; }

        /// <summary>
        /// Gets the zero based fragment index for the given stream.
        /// </summary>
        public uint FragmentIndex { get; private set; }

        /// <summary>
        /// Gets the start time in ticks since the beginning of the session. Note: the beginning time of the stream is always zero.
        /// </summary>
        public ulong StartTime { get; private set; }

        /// <summary>
        /// Gets the end time in ticks of the fragment. Duration can be derived by subtracting endtime from start time.
        /// </summary>
        public ulong EndTime { get; private set; }

        /// <summary>
        /// Gets the encoding properties used to create the fragment.
        /// </summary>
        public IMediaEncodingProperties EncodingProperties { get; private set; }
    }

    /// <summary>
    /// Event args to provide information about a non-critical capture error.
    /// </summary>
    public sealed class CaptureErrorEventArgs
    {
        // Error text captured at construction time; exposed read-only via Error.
        readonly string error;

        internal CaptureErrorEventArgs(string error)
        {
            this.error = error;
        }

        /// <summary>
        /// Gets the error message.
        /// </summary>
        public string Error
        {
            get { return error; }
        }
    }

    /// <summary>
    /// Enumeration used to provide preferences for how the default video profile should be constructed.
    /// </summary>
    public enum VideoEncodingPreferences
    {
        /// <summary>
        /// A single bitrate with a resolution matching the input source.
        /// </summary>
        SingleBitrate = 0,
        /// <summary>
        /// Multiple bitrates all with the same resolution as the input source. The number of tracks will be dependent on the capabilities of the input source; the higher resolution the input source, the more tracks will be created.
        /// </summary>
        MultiBitrate = 1,
        /// <summary>
        /// Multiple bitrates and resolutions. The highest resolution will match the input source. The number of tracks will be dependent on the capabilities of the input source; the higher resolution the input source, the more tracks will be created.
        /// </summary>
        MultiResolution = 2,
    }
}
