﻿using System;
using System.IO;
using LFSRecord2.Misc;

namespace LFSRecord2.Model.Sound
{
    /// <summary>
    /// <see cref="WaveStream"/> implementation for AIFF ("FORM"/"AIFF") audio files.
    /// Parses the container header to populate the wave format and locate the raw
    /// sample payload, then exposes the audio through the base stream members.
    /// </summary>
    public class AifStream : WaveStream
    {
        /// <summary>
        /// Aborts header parsing: closes the underlying stream and throws.
        /// </summary>
        /// <param name="message">Description of the malformed section.</param>
        private void FailFormat(string message)
        {
            _audioStream.Close();
            throw new Exception(message);
        }

        /// <summary>
        /// Parses the AIFF header: verifies the FORM/AIFF signature, walks the chunk
        /// list until the sound-data ("SSND") chunk, extracts the wave format from the
        /// "COMM" chunk and records where the audio payload starts.
        /// </summary>
        /// <exception cref="Exception">Thrown when the stream is not a valid or supported AIFF file.</exception>
        private void ReadHeader()
        {
            if (FileHelper.CharArrayToString(_audioStream.ReadChars(4)) != "FORM")
            {
                FailFormat("Invalid file format (1)");
            }

            // File length - generally ignored
            _audioStream.ReadInt32();

            if (FileHelper.CharArrayToString(_audioStream.ReadChars(4)) != "AIFF")
            {
                FailFormat("Invalid file format (2)");
            }

            int len;
            string chunkID;
            // Defaults (44.1 kHz / 16-bit / stereo); overwritten by the COMM chunk below.
            _audioFormat = new WaveFormatEx(44100, 16, 2);

            // Walk the chunks until the sound-data chunk is reached.
            do
            {
                chunkID = FileHelper.CharArrayToString(_audioStream.ReadChars(4)).ToUpperInvariant();
                switch (chunkID)
                {
                    case "COMT": // comments chunk
                    case "CHAN": // channel layout chunk
                        // Metadata we do not use - skip the whole chunk.
                        len = _audioStream.ReadInt32();
                        _audioStream.Position += len;
                        break;

                    case "COMM":
                        len = _audioStream.ReadInt32();
                        if (len < 12) // must at least cover the 12 bytes read below
                        {
                            FailFormat("Invalid file format (3)");
                        }

                        _audioFormat.nChannels = _audioStream.ReadUInt16();
                        // numSampleFrames (4 bytes) - unused; the payload length
                        // is taken from the SSND chunk instead.
                        _audioStream.ReadUInt16();
                        _audioStream.ReadUInt16();
                        _audioFormat.wBitsPerSample = _audioStream.ReadUInt16();

                        // The sample rate is an 80-bit IEEE extended float:
                        // a 16-bit sign/exponent word followed by a 64-bit mantissa.
                        // value = mantissaHigh * 2^(exponent - 0x400E) for the rates
                        // that fit the top mantissa word. The previous code ignored
                        // the exponent (correct only for 32768..65535 Hz, e.g.
                        // 44100/48000); decode it properly so 8000/11025/22050/
                        // 88200/96000 Hz files also work.
                        ushort srExponent = _audioStream.ReadUInt16();
                        ushort srMantissaHi = _audioStream.ReadUInt16();
                        int srShift = 0x400E - srExponent;
                        if (srShift >= 0)
                        {
                            _audioFormat.nSamplesPerSec = (uint)(srMantissaHi >> srShift);
                        }
                        else
                        {
                            _audioFormat.nSamplesPerSec = (uint)srMantissaHi << -srShift;
                        }

                        _audioFormat.nBlockAlign = (ushort)(_audioFormat.wBitsPerSample / 8.0d * _audioFormat.nChannels);
                        _audioFormat.nAvgBytesPerSec = _audioFormat.nBlockAlign * _audioFormat.nSamplesPerSec;

                        // Skip whatever the chunk contains beyond the 12 bytes consumed above.
                        len -= 12;
                        if (len > 0)
                        {
                            _audioStream.Position += len;
                        }
                        break;

                    case "SSND":
                        // Sound-data chunk found - handled after the loop.
                        break;

                    default:
                        FailFormat("Invalid file format (4) - ChunkID : " + chunkID);
                        break;
                }
            } while (chunkID != "SSND");

            // The SSND payload must still lie ahead of the current position.
            if (_audioStream.Position >= _audioStream.Length)
            {
                FailFormat("Invalid file format (5)");
            }

            if (_audioFormat.nChannels > 2)
            {
                FailFormat("No multichannel support");
            }

            if (_audioFormat.wBitsPerSample != 8 && _audioFormat.wBitsPerSample != 16 && _audioFormat.wBitsPerSample != 24)
            {
                FailFormat("Unsupported word length");
            }

            // SSND layout: 4-byte size, then 8 bytes (offset + blockSize) that
            // precede the samples - exclude those 8 from the audio length.
            _audioLength = _audioStream.ReadInt32() - 8;
            _audioStream.Position += 8;
            _audioDataPos = _audioStream.Position;

            Position = 0;
            AudioType = WaveTypes.Aif;
            // NOTE(review): duration is computed against the mixer's sample rate
            // rather than the file's own nSamplesPerSec (see the commented-out
            // alternative in history) - presumably intentional because playback
            // runs at the mixer rate; confirm before changing.
            Duration = Length / (double)(_audioFormat.nBlockAlign * LfsRecordController.Mixer.SampleRate);
        }

        /// <summary>
        /// Opens an AIFF file from disk.
        /// </summary>
        /// <param name="fileName">Path of the AIFF file to open.</param>
        public AifStream(string fileName)
            : this(new EndianFileStream(fileName, FileMode.Open, EndianTypes.Little))
        {
        }

        /// <summary>
        /// Wraps an already-open stream containing AIFF data. The stream is owned
        /// by this instance and is closed on dispose or on a parse failure.
        /// </summary>
        /// <param name="stream">Source stream positioned at the start of the AIFF data.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
        /// <exception cref="Exception">Thrown when the header is not valid AIFF.</exception>
        public AifStream(EndianFileStream stream)
        {
            if (stream == null)
            {
                throw new ArgumentNullException("stream");
            }

            _audioStream = stream;
            ReadHeader();
            GenerateWaveView();
        }

        ~AifStream()
        {
            // Finalizer must not touch managed members, so pass false.
            // (Previously passed true, which ran managed-cleanup logic and
            // called GC.SuppressFinalize during finalization.)
            Dispose(false);
        }

        /// <summary>
        /// Releases the underlying audio stream.
        /// </summary>
        /// <param name="disposing">True when called via Dispose(); false when called from the finalizer.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Managed state is safe to touch here: close the stream we own.
                // (Previously nothing was closed, leaking the file handle.)
                if (_audioStream != null)
                {
                    _audioStream.Close();
                }
                GC.SuppressFinalize(this);
            }
        }
    }
}
