﻿using Common.FrontEnd;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using Util.Props;

namespace FrontEnd
{
    /// <summary>
    /// A <see cref="BaseDataProcessor"/> which wraps incoming <see cref="DoubleData"/> objects into
    /// equally sized blocks of a defined length.
    /// </summary>
    public class DataBlocker : BaseDataProcessor
    {
        /// <summary>The property for the block size of generated data-blocks in milliseconds.</summary>
        [S4Double(defaultValue = 10)]
        public static readonly string PROP_BLOCK_SIZE_MS = "blockSizeMs";

        // Requested block length in milliseconds (set via properties or constructor).
        private double blockSizeMs;

        // Block length in samples; int.MaxValue until a DataStartSignal supplies the sample rate.
        private int blockSizeSamples = int.MaxValue;

        // Read offset into the first buffered DoubleData: samples before this position
        // were already emitted as part of the previous block.
        private int curFirstSamplePos;

        // Sample rate taken from the most recent DataStartSignal; -1 until one is seen.
        private int sampleRate = -1;

        // Not-yet-emitted DoubleData objects, oldest first.
        private readonly List<DoubleData> inBuffer = new List<DoubleData>();

        // Number of unconsumed samples currently available in inBuffer.
        private int curInBufferSize;


        /// <summary>
        /// Creates a blocker whose block size is configured later via <see cref="newProperties"/>.
        /// </summary>
        public DataBlocker()
        {
        }

        /// <summary>
        /// Creates a blocker with an explicit block size.
        /// </summary>
        /// <param name="blockSizeMs">Block length in milliseconds.</param>
        public DataBlocker(double blockSizeMs)
        {
            this.blockSizeMs = blockSizeMs;
        }

        /// <summary>
        /// Reads the configured block size (in milliseconds) from the property sheet.
        /// </summary>
        /// <param name="propertySheet">The configuration to read from.</param>
        public override void newProperties(PropertySheet propertySheet)
        {
            base.newProperties(propertySheet);
            blockSizeMs = propertySheet.getDouble(PROP_BLOCK_SIZE_MS);
        }


        /// <summary>Returns the configured block size in milliseconds.</summary>
        public double getBlockSizeMs()
        {
            return blockSizeMs;
        }


        /// <summary>
        /// Returns the next block of samples, merging and/or splitting the incoming
        /// <see cref="DoubleData"/> objects as necessary to produce blocks of exactly
        /// <c>blockSizeSamples</c> samples. Non-audio signals are passed through unchanged;
        /// a <see cref="DataStartSignal"/> additionally resets the blocker and discards any
        /// samples still buffered from the previous stream.
        /// </summary>
        /// <returns>A <see cref="DoubleData"/> block, or the pass-through signal.</returns>
        public IData getData()
        {
            // Accumulate incoming data until at least one full block is buffered.
            while (curInBufferSize < blockSizeSamples || curInBufferSize == 0)
            {
                IData data = getPredecessor().getData();

                if (data is DataStartSignal)
                {
                    sampleRate = ((DataStartSignal) data).SampleRate;
                    blockSizeSamples = (int) Math.Round(sampleRate * blockSizeMs / 1000);

                    // A new stream starts: drop whatever is left over from the previous one.
                    curInBufferSize = 0;
                    curFirstSamplePos = 0;
                    inBuffer.Clear();
                }

                // Pass all signals (start/end markers etc.) through unchanged.
                if (!(data is DoubleData))
                {
                    return data;
                }

                DoubleData dd = (DoubleData) data;
                inBuffer.Add(dd);
                curInBufferSize += dd.Values.Length;
            }

            // Now we are ready to merge the buffered data-objects into one block.
            double[] newSampleBlock = new double[blockSizeSamples];
            int copiedSamples = 0;

            long firstSample = inBuffer[0].getFirstSampleNumber() + curFirstSamplePos;

            while (inBuffer.Count != 0)
            {
                DoubleData dd = inBuffer[0];
                double[] values = dd.Values;
                int remaining = values.Length - curFirstSamplePos;
                int copyLength = Math.Min(blockSizeSamples - copiedSamples, remaining);

                Array.Copy(values, curFirstSamplePos, newSampleBlock, copiedSamples, copyLength);

                if (copyLength < remaining)
                {
                    // The current data-object contains more samples than necessary: keep it
                    // (partially consumed) at the head of the buffer for the next block. Only
                    // the last buffered object can overshoot, so it must be the sole survivor.
                    Trace.Assert(inBuffer.Count == 1);
                    curFirstSamplePos += copyLength;
                    break;
                }

                // Fully consumed: drop it and continue with the next buffered object.
                inBuffer.RemoveAt(0);
                copiedSamples += copyLength;
                curFirstSamplePos = 0;
            }

            curInBufferSize = inBuffer.Count == 0 ? 0 : inBuffer[0].Values.Length - curFirstSamplePos;

            return new DoubleData(newSampleBlock, sampleRate, firstSample);
        }

    }
}
