﻿using System;
using System.Collections.Generic;
using NAudio.Midi;
using NAudio.Wave;
using PT.NoteDetection.Midi;
using PT.NoteDetection.Util;

namespace PT.NoteDetection.Envelope {

    /// <summary>
    /// Detects note on/off events in a mono 32-bit IEEE-float audio stream by
    /// comparing a short-term ("local") energy envelope against a long-term
    /// ("global") one; an onset is reported when the local envelope rises
    /// sufficiently above the global envelope.
    /// </summary>
    public class EnvelopeNoteDetector {

        // The detector only supports mono input.
        const int Channels = 1;

        // Absolute noise floor: the local envelope must exceed the scaled
        // global envelope by at least this amount before a note-on fires.
        const float MinimumLevel = 0.005f;

        // Envelope-follower window widths, in seconds.
        readonly float InstantWindowWidth;
        readonly float LocalWindowWidth;
        readonly float GlobalWindowWidth;

        readonly WaveStream source;
        readonly WaveBuffer buffer;

        readonly int bufferSizeBytes;

        // Multiplier applied to the global envelope; note-on triggers when
        // localAvg > globalAvg * SensitivityThreshold + MinimumLevel.
        float SensitivityThreshold { get; set; }

        readonly WaveFormat format;
        readonly MovingAverage local, global, instant, variance;

        // MIDI times (ticks) of the pending note-on/note-off; -1 means "none pending".
        long noteOnTime;
        long noteOffTime;

        TimeService timeService;

        /// <summary>
        /// Time service used to convert stream positions into MIDI ticks.
        /// Lazily created on first access when none has been assigned.
        /// </summary>
        public TimeService TimeService {
            get { return timeService ?? (timeService = new TimeService()); }
            set { timeService = value; }
        }

        // Minimum note length (MIDI ticks) before a pending note-off is emitted;
        // debounces spurious retriggering around the threshold.
        int ReleaseTime {
            get {
                 return TimeService.TicksPerQuarterNote / 16;
            }
        }

        /// <summary>
        /// Creates a detector that reads from <paramref name="sourceStream"/>.
        /// </summary>
        /// <param name="sourceStream">Mono 32-bit IEEE-float stream to analyse.</param>
        /// <param name="timeService">Service providing tempo and MIDI tick conversions.</param>
        /// <exception cref="ArgumentException">
        /// The stream is not mono IEEE float, or a window width rounds to zero samples.
        /// </exception>
        public EnvelopeNoteDetector(WaveStream sourceStream, TimeService timeService) {
            source = sourceStream;
            format = WaveFormat.CreateIeeeFloatWaveFormat(sourceStream.WaveFormat.SampleRate, Channels);

            if (!format.Equals(sourceStream.WaveFormat)) {
                throw new ArgumentException("Format of source stream does not match expected format. Expected: " + format);
            }

            // One second of audio per Read() call:
            // bufSecs * Channels(=1) * SampleRate * BitsPerSample(=32) / 8.
            const int bufSecs = 1;
            bufferSizeBytes = bufSecs * format.SampleRate * format.BitsPerSample / 8;

            buffer = new WaveBuffer(bufferSizeBytes);

            TimeService = timeService;

            InstantWindowWidth = 0.03f;
            LocalWindowWidth = 60f / TimeService.Tempo / 32;  // 1/32 of a beat
            GlobalWindowWidth = 60f / TimeService.Tempo * 4;  // four beats

            instant = new MovingAverage(new RectangularWindowFunction<float>(CalculateWindowWidthSamples(InstantWindowWidth)), Math.Abs);
            local = new MovingAverage(new RectangularWindowFunction<float>(CalculateWindowWidthSamples(LocalWindowWidth)), SampleOperation.Power2);
            global = new MovingAverage(new RectangularWindowFunction<float>(CalculateWindowWidthSamples(GlobalWindowWidth)), SampleOperation.Power2);
            variance = new MovingAverage(new RectangularWindowFunction<float>(CalculateWindowWidthSamples(GlobalWindowWidth)), SampleOperation.Identity);

            SensitivityThreshold = 1.4f;

            noteOnTime = -1;
        }

        /// <summary>
        /// Reads the next chunk from the source stream and returns the note
        /// events detected within it. Call repeatedly until the stream is drained;
        /// a short (or zero-length) read signals end of stream, at which point any
        /// still-sounding note is closed.
        /// </summary>
        /// <returns>The note on/off events detected in this chunk (possibly empty).</returns>
        public IList<NoteEvent> Read() {
            var result = new List<NoteEvent>();
            var releaseTime = ReleaseTime;

            // Read samples from stream.
            var read = source.Read(buffer.ByteBuffer, 0, bufferSizeBytes);
            var sampleCount = read / sizeof (float);
            System.Diagnostics.Debug.Assert(read % sizeof (float) == 0);

            for (int i = 0; i < sampleCount; ++i) {
                float sample = buffer.FloatBuffer[i];

                local.Advance(sample);
                global.Advance(sample);
                instant.Advance(sample);

                var localAvg = local.Average();
                var globalAvg = global.Average();

                // Variance of (global - local) is tracked for diagnostics only.
                var d = globalAvg - localAvg;
                variance.Advance(d * d);
                var v = variance.Average();

                // i * sizeof(float) is the byte offset of sample i within this chunk.
                if (globalAvg * SensitivityThreshold + MinimumLevel < localAvg) {
                    // Onset: local energy rose above the scaled global envelope.
                    if (noteOnTime < 0) {
                        noteOnTime = CalculateMidiTime(i * sizeof (float), read);
                        result.Add(EventHelper.CreateNoteOnEvent(noteOnTime, (sbyte) (instant.Average() * sbyte.MaxValue)));

                        System.Diagnostics.Trace.WriteLine(string.Format("NoteOn :{0} l:{1:F4} g:{2:F4} t:{3}, v:{4}", noteOnTime, localAvg, globalAvg, SensitivityThreshold, v), "Internal.Data");
                    }
                    else if (0 < noteOffTime) {
                        // Energy came back up before the release elapsed:
                        // cancel the pending note-off and keep the note sounding.
                        noteOffTime = -1;
                    }
                }
                else if (0 <= noteOnTime && noteOffTime < 0) {
                    // Energy dropped while a note is sounding: remember where it ended.
                    noteOffTime = CalculateMidiTime(i * sizeof (float), read);
                }

                if (0 <= noteOffTime) {
                    // Emit the note-off only once the note has outlasted the release time.
                    var duration = CalculateMidiTime(i * sizeof (float), read) - noteOnTime;
                    if (releaseTime < duration) {
                        result.Add(EventHelper.CreateNoteOffEvent(noteOffTime));
                        noteOnTime = noteOffTime = -1;
                    }
                }
            }

            // Reached end of stream with a note still sounding: close it, and
            // reset the pending state so that further Read() calls on the drained
            // stream do not emit duplicate note-off events.
            if (read < bufferSizeBytes && 0 <= noteOnTime) {
                result.Add(EventHelper.CreateNoteOffEvent(CalculateMidiTime(read, read)));
                noteOnTime = noteOffTime = -1;
            }

            return result;
        }

        // Converts a byte offset within the current chunk into an absolute MIDI time.
        long CalculateMidiTime(int processedByteOffset, int chunkSizeBytes) {
            var time = CalculateTimespanFromStreamStart(processedByteOffset, chunkSizeBytes);
            return TimeService.CalculateMidiTime(time);
        }

        /// <summary>
        /// Calculates the current time based on the position in the source stream
        /// and an additional offset.
        /// </summary>
        /// <remarks>
        /// When a stream is read, it is read in chunks. The CurrentTime property of
        /// the source stream reflects the position after the whole chunk was read,
        /// even if the chunk is not fully processed yet. To be precise, the
        /// unprocessed remainder of the chunk must be subtracted from CurrentTime.
        /// </remarks>
        /// <param name="byteOffset">Processed offset into the current chunk, in bytes.</param>
        /// <param name="chunkSizeBytes">Chunk size in bytes.</param>
        /// <returns>Processed position into the source stream.</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// An argument is negative, or the offset exceeds the chunk size.
        /// </exception>
        TimeSpan CalculateTimespanFromStreamStart(int byteOffset, int chunkSizeBytes) {
            if (byteOffset < 0) {
                throw new ArgumentOutOfRangeException("byteOffset", "Byte offset must be non-negative.");
            }
            if (chunkSizeBytes < 0 || chunkSizeBytes < byteOffset) {
                throw new ArgumentOutOfRangeException("chunkSizeBytes", "Chunk size must be non-negative and not smaller than the byte offset.");
            }

            var bytesPerSample = source.WaveFormat.BitsPerSample / 8;
            var bytesPerSecond = source.WaveFormat.SampleRate * bytesPerSample;

            var unprocessedBytesCount = chunkSizeBytes - byteOffset;
            var unprocessedTicks = TimeService.TicksPerSecond * unprocessedBytesCount / bytesPerSecond;

            return new TimeSpan(source.CurrentTime.Ticks - unprocessedTicks);
        }

        /// <summary>
        /// Calculates the sample count for the given time span (s) and the stream format.
        /// </summary>
        /// <param name="seconds">Window width (s).</param>
        /// <returns>Window width in samples (at least 1).</returns>
        /// <exception cref="ArgumentException">The width rounds down to zero samples.</exception>
        int CalculateWindowWidthSamples(float seconds) {
            var samples = (int) (format.SampleRate * seconds);

            if (samples < 1) {
                throw new ArgumentException(string.Format("A window width of {0} s would result in a window width of 0 samples", seconds));
            }

            return samples;
        }

        /// <summary>Diagnostic summary of the detector configuration.</summary>
        public override string ToString() {
            return string.Format("Type: {0}. Threshold: {1}, Minimum Level: {2}, Release time: {3}", GetType().Name, SensitivityThreshold, MinimumLevel, ReleaseTime);
        }
    }
}