﻿using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using LFSRecord2.Model.Undo;
using LFSRecord2.Structures;
using LFSRecord2.Model.Media;
using LFSRecord2.Model.Sound;
using LFSRecord2.Misc;

namespace LFSRecord2.Model
{
    /// <summary>
    /// A single audio layer in the composition: an ordered collection of
    /// <see cref="AudioRegion"/>s plus layer-level state (name, mute, collapse,
    /// selection) and the mixing code that renders the layer's regions into
    /// per-channel float buffers for the mixer.
    /// </summary>
    public class AudioLayer : INotifyPropertyChanged, IDisposable
    {
        /// <summary>Position of this layer in the layer stack.</summary>
        public short LayerIndex { get; set; }

        /// <summary>Project-unique identifier (assigned from MathHelper.UniqueID).</summary>
        public int UniqueID { get; set; }

        /// <summary>Always <see cref="CompLayerTypes.Audio"/> for this class; set in the constructor.</summary>
        public CompLayerTypes LayerType
        {
            get;
            protected set;
        }

        private string _layerName = "";
        /// <summary>Display name of the layer. Raises PropertyChanged on set.</summary>
        public string LayerName
        {
            get { return _layerName; }
            set
            {
                _layerName = value;
                OnPropertyChanged(nameof(LayerName));
            }
        }

        private bool _isCollapsed = true;
        /// <summary>Whether the layer is shown collapsed in the UI. Raises PropertyChanged on set.</summary>
        public bool IsCollapsed
        {
            get { return _isCollapsed; }
            set
            {
                _isCollapsed = value;
                OnPropertyChanged(nameof(IsCollapsed));
            }
        }

        private bool _isSelected = false;
        /// <summary>Whether the layer is selected. Only notifies on an actual transition.</summary>
        public bool IsSelected
        {
            get { return _isSelected; }
            set
            {
                bool oldValue = _isSelected;
                _isSelected = value;
                if (oldValue != _isSelected)
                    OnPropertyChanged(nameof(IsSelected));
            }
        }

        private bool _isMuted = false;
        /// <summary>When true, <see cref="ReadAudio"/> skips this layer's regions (pointers still advance).</summary>
        public bool IsMuted
        {
            get { return _isMuted; }
            set
            {
                _isMuted = value;
                OnPropertyChanged(nameof(IsMuted));
            }
        }

        List<AudioRegion> _audioRegionStore = new List<AudioRegion>();
        SortableObservableCollection<AudioRegion> _audioRegions;

        /// <summary>
        /// The regions on this layer, kept ordered by TimeOffset (see
        /// <see cref="AddRegion(AudioRegion)"/> and <see cref="SortRegions"/>).
        /// The observable collection is created lazily on first access.
        /// </summary>
        public SortableObservableCollection<AudioRegion> AudioRegions
        {
            get
            {
                if (_audioRegions == null)
                    _audioRegions = new SortableObservableCollection<AudioRegion>(_audioRegionStore);
                return _audioRegions;
            }
        }

        /// <summary>Animatable layer properties (currently unused; see commented-out Volume setup in the ctor).</summary>
        public List<AudioLayerPropertyBase> Properties { get; protected set; }

        /// <summary>Returns true if any layer property is currently selected.</summary>
        public bool HasSelectedProperties()
        {
            foreach (AudioLayerPropertyBase prop in Properties)
            {
                if (prop.IsSelected)
                    return true;
            }
            return false;
        }

        public event PropertyChangedEventHandler PropertyChanged;

        /// <summary>Raises <see cref="PropertyChanged"/> for the given property name.</summary>
        protected virtual void OnPropertyChanged(string propertyName)
        {
            // Copy to a local so a concurrent unsubscribe cannot null the field
            // between the null check and the invocation.
            PropertyChangedEventHandler handler = this.PropertyChanged;
            if (handler != null)
                handler(this, new PropertyChangedEventArgs(propertyName));
        }

        public enum PropertyTypes
        {
            Volume = 0,
        }

        [Flags]
        public enum PropertyTypesFlags
        {
            None = 0,
            Volume = 1,
        }

        /// <summary>
        /// Creates a new audio layer.
        /// </summary>
        /// <param name="layerIndex">Position of the layer in the layer stack.</param>
        /// <param name="layerName">Display name for the layer.</param>
        /// <param name="audioLock">Shared lock object guarding region access during playback/mixing.</param>
        public AudioLayer(short layerIndex, string layerName, Object audioLock)
        {
            UniqueID = MathHelper.UniqueID;
            LayerIndex = layerIndex;
            LayerType = CompLayerTypes.Audio;
            LayerName = layerName;
            _audioLock = audioLock;

            Properties = new List<AudioLayerPropertyBase>();
            //Properties.Add(new AudioLayerPropertyDouble(this, 0, "Volume", 1, LayerPropertyValueMeaning.Percent));

            //((AudioLayerPropertyDouble)Properties[0]).MinValue = 0;
            //((AudioLayerPropertyDouble)Properties[0]).MaxValue = 1;
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>Raised once when the layer is disposed; all handlers are dropped afterwards.</summary>
        public event EventHandler Disposed;

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                Properties.Clear();

                EventHandler handler = this.Disposed;
                if (handler != null)
                    handler(this, EventArgs.Empty);

                // Drop all subscribers so the layer does not keep listeners alive.
                PropertyChanged = null;
                Disposed = null;
            }
        }


        double _position = 0;
        /// <summary>
        /// The current position in time (in seconds), indicating where we are in our virtual audio stream.
        /// Setting it recomputes every region's PlaybackPtr (a byte offset into the
        /// region's stream; negative while the region has not started yet).
        /// </summary>
        public double Position
        {
            get { return _position; }
            set
            {
                _position = value;

                // Set the PlaybackPtr values of all regions in this layer.
                lock (_audioLock)
                {
                    foreach (AudioRegion ar in AudioRegions)
                    {
                        if (ar.AudioStream != null)
                        {
                            // seconds relative to region start * bytes per second.
                            ar.PlaybackPtr = (long)((_position - ar.TimeOffset) * (ar.AudioStream.Format.nBlockAlign * LfsRecordController.Mixer.SampleRate));
                            // Snap to a frame boundary.
                            ar.PlaybackPtr -= ar.PlaybackPtr % ar.AudioStream.Format.nBlockAlign;
                        }
                        //Debug.WriteLine("PlaybackPtr : {0}", ar.PlaybackPtr);
                    }
                }
            }
        }

        float[][] _channelBuffer;
        const double TO_FLOAT_DIV = (double)Int32.MaxValue;
        Object _audioLock;

        /// <summary>
        /// Mixes the audible portion of every region on this layer into reusable
        /// per-channel float buffers and advances each region's PlaybackPtr.
        /// </summary>
        /// <param name="format">Target output format; only nChannels is consulted (1 = mono mixdown, otherwise stereo layout).</param>
        /// <param name="channelBufNumSamples">Number of samples to produce per channel this call.</param>
        /// <param name="channelBufSize">Allocated length of each channel buffer (must be >= channelBufNumSamples).</param>
        /// <returns>One float array per output channel; the arrays are reused across calls.</returns>
        public float[][] ReadAudio(WaveFormatEx format, int channelBufNumSamples, int channelBufSize)
        {
            // (Re)allocate and zero the reusable per-channel output buffers.
            if (_channelBuffer == null || _channelBuffer.Length != format.nChannels)
                _channelBuffer = new float[format.nChannels][];
            for (int a = 0; a < format.nChannels; a++)
            {
                if (_channelBuffer[a] == null || _channelBuffer[a].Length != channelBufSize)
                    _channelBuffer[a] = new float[channelBufSize];
                Array.Clear(_channelBuffer[a], 0, channelBufSize);
            }

            lock (_audioLock)
            {
                int smpIdx;
                long stopPtr;
                ByteBuffer sampleData;
                foreach (AudioRegion ar in AudioRegions)
                {
                    if (ar.AudioStream == null)
                        continue;

                    // Muted, or the region lies entirely outside the read window:
                    // skip it but still advance its playback pointer.
                    if (IsMuted ||
                        ar.PlaybackPtr + ar.AudioStream.Format.nBlockAlign * channelBufNumSamples < 0 ||
                        ar.PlaybackPtr >= ar.AudioStream.Length)
                    {
                        ar.PlaybackPtr += ar.AudioStream.Format.nBlockAlign * channelBufNumSamples;
                        continue;
                    }

                    // Prepare precise read start position.
                    int samplesToRead = channelBufNumSamples;
                    if (ar.PlaybackPtr < 0)
                    {
                        // Region starts partway into this buffer. Compute the output
                        // sample offset BEFORE resetting PlaybackPtr (the previous code
                        // zeroed the pointer first, which made smpIdx always 0), and
                        // shorten the read accordingly so the decode loop can never
                        // write past the end of the channel buffers.
                        smpIdx = (int)(-ar.PlaybackPtr / ar.AudioStream.Format.nBlockAlign);
                        samplesToRead = channelBufNumSamples - smpIdx;
                        ar.AudioStream.Position = 0;
                        ar.PlaybackPtr = 0;
                    }
                    else
                    {
                        ar.AudioStream.Position = ar.PlaybackPtr;
                        smpIdx = 0;
                    }

                    stopPtr = ar.PlaybackPtr + (long)samplesToRead * ar.AudioStream.Format.nBlockAlign;
                    if (stopPtr > ar.AudioStream.Length)
                        stopPtr = ar.AudioStream.Length;

                    // Read a whole block from disk at once. Speeds things up a lot.
                    // NOTE(review): Aif -> Little / else -> Big looks inverted (AIFF is
                    // big-endian, WAV little-endian); confirm ByteBuffer's endian
                    // parameter semantics before changing this.
                    sampleData = new ByteBuffer((int)(stopPtr - ar.PlaybackPtr), ((ar.AudioStream.AudioType == WaveTypes.Aif) ? EndianTypes.Little : EndianTypes.Big));
                    ar.AudioStream.Read(sampleData.Buf, 0, (int)(stopPtr - ar.PlaybackPtr));
                    sampleData.Position = 0;

                    // Decode source frames into the float output buffers.
                    float sample = 0.0f;
                    int sampleLen = ar.AudioStream.Format.nBlockAlign / ar.AudioStream.Format.nChannels;
                    byte[] sampleBytes = new byte[4];
                    // Samples are left-aligned into a 32-bit slot so BitConverter.ToInt32
                    // scales any bit depth to the full Int32 range.
                    int byteOffset = 4 - sampleLen;
                    while (ar.PlaybackPtr < stopPtr)
                    {
                        if (format.nChannels == 1)
                        {
                            // Everything to mono
                            if (ar.AudioStream.Format.nChannels == 1)
                            {
                                // Mono to mono
                                sampleData.ReadBytes(sampleBytes, byteOffset, sampleLen);
                                if (sampleLen == 1)
                                    sample = (float)(((int)sampleBytes[3] - 0x7F) / 127.0f); // 8-bit audio is unsigned, centered on 0x7F
                                else
                                    sample = (float)(BitConverter.ToInt32(sampleBytes, 0) / TO_FLOAT_DIV);
                            }
                            else
                            {
                                // Multi-channel to mono: sum channels, attenuated to avoid clipping.
                                sample = 0;
                                for (int c = 0; c < ar.AudioStream.Format.nChannels; c++)
                                {
                                    sampleData.ReadBytes(sampleBytes, byteOffset, sampleLen);
                                    if (sampleLen == 1)
                                        sample += (float)(((int)sampleBytes[3] - 0x7F) / 127.0f) * 0.75f;
                                    else
                                        sample += (float)(BitConverter.ToInt32(sampleBytes, 0) / TO_FLOAT_DIV) * 0.75f;
                                }
                            }
                            _channelBuffer[0][smpIdx] = sample;
                        }
                        else
                        {
                            // Everything to stereo
                            for (int c = 0; c < ar.AudioStream.Format.nChannels; c++)
                            {
                                sampleData.ReadBytes(sampleBytes, byteOffset, sampleLen);
                                if (sampleLen == 1)
                                    sample = (float)(((int)sampleBytes[3] - 0x7F) / 127.0f);
                                else
                                    sample = (float)(BitConverter.ToInt32(sampleBytes, 0) / TO_FLOAT_DIV);

                                // Mono to stereo: duplicate the single channel.
                                if (ar.AudioStream.Format.nChannels == 1)
                                {
                                    _channelBuffer[0][smpIdx] = sample;
                                    _channelBuffer[1][smpIdx] = sample;
                                }
                                else
                                {
                                    _channelBuffer[c][smpIdx] = sample;
                                }
                            }
                        }

                        ar.PlaybackPtr += ar.AudioStream.Format.nBlockAlign;
                        smpIdx++;
                    }

                    //Debug.WriteLine("ptr : {0} - pos : {1} - len {2}", ar.PlaybackPtr, ar.AudioStream.Position, ar.AudioStream.Length);
                }
            }

            return _channelBuffer;
        }

        /// <summary>
        /// Adds a region for the given audio file. When the file is already a
        /// registered audio media object its stream is attached; otherwise the
        /// region is created unresolved (MediaIndex = -1, no stream).
        /// </summary>
        public AudioRegion AddRegion(string audioFilePath, double timeOffset, double startTime, double endTime)
        {
            AudioRegion ar;
            int mediaIndex = LfsRecordController.project.HasMediaFile(audioFilePath);
            // Guard against a "not found" (negative) index before dereferencing
            // MediaObjects — the unresolved fallback below handles that case too.
            if (mediaIndex >= 0 && LfsRecordController.project.MediaObjects[mediaIndex].MediaType == MediaTypes.Audio)
            {
                MediaAudio ma = (MediaAudio)LfsRecordController.project.MediaObjects[mediaIndex];
                ar = AddRegion(new AudioRegion(
                    this,
                    timeOffset,
                    new MediaAudioIdent()
                    {
                        AudioFilePath = ma.FilePath,
                        MediaIndex = mediaIndex,
                    },
                    ma.AudioStream));
            }
            else
            {
                ar = AddRegion(new AudioRegion(
                    this,
                    timeOffset,
                    new MediaAudioIdent()
                    {
                        AudioFilePath = audioFilePath,
                        MediaIndex = -1,
                    },
                    null));
            }
            ar.StartTime = startTime;
            ar.EndTime = endTime;
            return ar;
        }

        /// <summary>Adds a region backed by an already-resolved audio media object.</summary>
        public AudioRegion AddRegion(int mediaIndex, MediaAudio ma, double timeOffset)
        {
            return AddRegion(new AudioRegion(
                this,
                timeOffset,
                new MediaAudioIdent()
                {
                    AudioFilePath = ma.FilePath,
                    MediaIndex = mediaIndex,
                },
                ma.AudioStream));
        }

        /// <summary>
        /// Inserts a region, keeping <see cref="AudioRegions"/> ordered by TimeOffset,
        /// then renumbers all region indexes.
        /// </summary>
        public AudioRegion AddRegion(AudioRegion ar)
        {
            bool haveInserted = false;
            for (int a = 0; a < AudioRegions.Count; a++)
            {
                if (ar.TimeOffset < AudioRegions[a].TimeOffset)
                {
                    AudioRegions.Insert(a, ar);
                    haveInserted = true;
                    break;
                }
            }
            if (!haveInserted)
                AudioRegions.Add(ar);

            WriteRegionIndexes();

            return ar;
        }

        /// <summary>Rewrites each region's RegionIndex to match its collection position.</summary>
        public void WriteRegionIndexes()
        {
            for (int a = 0; a < AudioRegions.Count; a++)
            {
                AudioRegions[a].RegionIndex = a;
            }
        }

        /// <summary>Re-sorts the regions by TimeOffset, ascending.</summary>
        public void SortRegions()
        {
            AudioRegions.Sort(x => x.TimeOffset, ListSortDirection.Ascending);
        }

        /// <summary>
        /// Returns the highest key-frame number across all properties, or -1.
        /// Currently always -1: key-frame support is commented out.
        /// </summary>
        public int LastKeyFrameNum()
        {
            int index = -1;
            foreach (AudioLayerPropertyBase prop in Properties)
            {
                //if (prop.KeyFrames.Count > 0 && prop.KeyFrames[prop.KeyFrames.Count - 1].FrameNum > index)
                //    index = prop.KeyFrames[prop.KeyFrames.Count - 1].FrameNum;
            }
            return index;
        }

        /// <summary>
        /// Recreates a region from serialized data (undo/redo, project load).
        /// Silently skips data whose media file is missing or not audio.
        /// </summary>
        public void ReinstateAudioRegion(AudioRegionData RegionData)
        {
            int mediaIndex = LfsRecordController.project.HasMediaFile(RegionData.MediaFilePath);
            // Guard against a "not found" (negative) index before dereferencing MediaObjects.
            if (mediaIndex >= 0 && LfsRecordController.project.MediaObjects[mediaIndex].MediaType == MediaTypes.Audio)
            {
                AudioRegion ar = AddRegion(mediaIndex, (MediaAudio)LfsRecordController.project.MediaObjects[mediaIndex], RegionData.TimeOffset);
                ar.UniqueID = RegionData.UniqueID;
                ar.StartTime = RegionData.StartTime;
                ar.EndTime = RegionData.EndTime;
                if (RegionData.IsSelected)
                    ar.IsSelected = true;
            }
        }

        /// <summary>Serializes the layer (and its regions) into a plain data structure.</summary>
        public AudioLayerData DataStructure()
        {
            Debug.WriteLine("Creating Audio Datastructure");
            AudioLayerData data = new AudioLayerData()
            {
                LayerIndex = LayerIndex,
                UniqueID = UniqueID,
                LayerType = LayerType,
                LayerName = LayerName,
                IsCollapsed = IsCollapsed,
                IsSelected = IsSelected,
                NumRegions = AudioRegions.Count,
                AudioRegions = new AudioRegionData[AudioRegions.Count],
                NumProperties = Properties.Count,
            };

            // populate audio regions
            for (int a = 0; a < AudioRegions.Count; a++)
            {
                data.AudioRegions[a] = AudioRegions[a].DataStructure();
            }

            // populate properties


            return data;
        }
    }
}
