#include <netdb.h>
#include <netinet/in.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>   /* calloc/free used by the stream callbacks */
#include <string.h>
#include <unistd.h>
#include <AudioToolbox/AudioToolbox.h>

#import "IOTCAPIs.h"
#import "AVAPIs.h"
#import "AVIOCTRLDEFs.h"
#import "AVFRAMEINFO.h"
#import <UIKit/UIKit.h>

#import "globalConfiguration.h"

// Audio queue output callback: invoked by the AudioQueue when it has finished
// playing a buffer, which is then free to be refilled (definition below).
void MyAudioQueueOutputCallback(void* inClientData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer);

// AudioFileStream property-listener callback: invoked by the stream parser
// when a property value (e.g. the data format) becomes available.
void MyPropertyListenerProc(	void *							inClientData,
                            AudioFileStreamID				inAudioFileStream,
                            AudioFileStreamPropertyID		inPropertyID,
                            UInt32 *						ioFlags);

// AudioFileStream packets callback: invoked by the stream parser when it has
// extracted complete audio packets from the parsed byte stream.
void MyPacketsProc(				void *							inClientData,
                   UInt32							inNumberBytes,
                   UInt32							inNumberPackets,
                   const void *					inInputData,
                   AudioStreamPacketDescription	*inPacketDescriptions);


/**
 * AudioFileStream property-listener callback.
 *
 * Called by the stream parser when it discovers property values. On
 * kAudioFileStreamProperty_ReadyToProducePackets it creates the output
 * AudioQueue from the parsed stream format, allocates the playback buffers,
 * installs the magic cookie (codec configuration, if any) and starts playback.
 *
 * @param inClientData       A MyData* carrying queue/buffer state (see globalConfiguration.h).
 * @param inAudioFileStream  The parser that found the property.
 * @param inPropertyID       Which property became available.
 * @param ioFlags            In/out parse flags (unused here).
 */
void MyPropertyListenerProc(	void *							inClientData,
                            AudioFileStreamID				inAudioFileStream,
                            AudioFileStreamPropertyID		inPropertyID,
                            UInt32 *						ioFlags)
{
        // this is called by audio file stream when it finds property values
    MyData* myData = (MyData*)inClientData;
    OSStatus err = noErr;

    // Decode the four-char property code for logging.
    printf("found property '%c%c%c%c'\n", (char)(inPropertyID>>24)&255, (char)(inPropertyID>>16)&255, (char)(inPropertyID>>8)&255, (char)inPropertyID&255);

    switch (inPropertyID) {
        case kAudioFileStreamProperty_ReadyToProducePackets :
        {
                // the file stream parser is now ready to produce audio packets.
                // get the stream format.
            AudioStreamBasicDescription asbd;
            UInt32 asbdSize = sizeof(asbd);
            err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &asbd);
            if (err) { PRINTERROR("get kAudioFileStreamProperty_DataFormat"); myData->failed = true; break; }

                // Create the playback queue. NULL run loop / mode: the output
                // callback is delivered on one of the queue's own threads.
            err = AudioQueueNewOutput(&asbd, MyAudioQueueOutputCallback, myData, NULL, NULL, 0, &myData->audioQueue);
            if (err) { PRINTERROR("AudioQueueNewOutput"); myData->failed = true; break; }

                // allocate audio queue buffers
            for (unsigned int i = 0; i < kNumAQBufs; ++i) {
                err = AudioQueueAllocateBuffer(myData->audioQueue, kAQBufSize, &myData->audioQueueBuffer[i]);
                if (err) { PRINTERROR("AudioQueueAllocateBuffer"); myData->failed = true; break; }
            }
                // BUG FIX: the break above only exits the for loop, not the
                // switch case — bail out here instead of configuring and
                // starting a partially-built queue after an allocation failure.
            if (err) break;

                // get the cookie size (codec configuration; optional for some formats)
            UInt32 cookieSize = 0;
            Boolean writable;
            err = AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable);
            if (err) { PRINTERROR("info kAudioFileStreamProperty_MagicCookieData"); break; }
            printf("cookieSize %d\n", (unsigned int)cookieSize);

            if (cookieSize > 0) {
                    // get the cookie data
                void* cookieData = calloc(1, cookieSize);
                if (cookieData == NULL) { PRINTERROR("calloc cookieData"); break; }
                err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData);
                if (err) { PRINTERROR("get kAudioFileStreamProperty_MagicCookieData"); free(cookieData); break; }

                    // set the cookie on the queue.
                err = AudioQueueSetProperty(myData->audioQueue, kAudioQueueProperty_MagicCookie, cookieData, cookieSize);
                free(cookieData);
                if (err) { PRINTERROR("set kAudioQueueProperty_MagicCookie"); break; }
            }

                // NULL start time: begin playing as soon as possible.
            AudioQueueStart(myData->audioQueue, NULL);
            break;
        }
    }
}

/**
 * AudioFileStream packets callback.
 *
 * Called by the stream parser when complete audio packets have been extracted.
 * Copies the packet data into the currently selected queue buffer (chosen by
 * MyAudioQueueOutputCallback via fillBufferIndex) and enqueues it for playback.
 *
 * @param inClientData          A MyData* carrying queue/buffer state.
 * @param inNumberBytes         Total bytes in inInputData.
 * @param inNumberPackets       Number of packet descriptions.
 * @param inInputData           The packet bytes.
 * @param inPacketDescriptions  Per-packet offsets/sizes for VBR data.
 */
void MyPacketsProc(void *				inClientData,
                   UInt32							inNumberBytes,
                   UInt32							inNumberPackets,
                   const void *					inInputData,
                   AudioStreamPacketDescription	*inPacketDescriptions)
{
        // this is called by audio file stream when it finds packets of audio
    MyData* myData = (MyData*)inClientData;
    printf("got data.  bytes: %d  packets: %d\n", (unsigned int)inNumberBytes, (unsigned int)inNumberPackets);
    NSLog(@"获得包数据!");

    AudioQueueBufferRef fillBuffer = myData->audioQueueBuffer[myData->fillBufferIndex];

        // BUG FIX: clamp the copy to the buffer's real capacity — the original
        // memcpy was unchecked, so a packet batch larger than kAQBufSize would
        // overflow mAudioData.
    if (inNumberBytes > fillBuffer->mAudioDataBytesCapacity) {
        NSLog(@"[MyPacketsProc] %u bytes exceed buffer capacity %u, truncating",
              (unsigned int)inNumberBytes, (unsigned int)fillBuffer->mAudioDataBytesCapacity);
        inNumberBytes = fillBuffer->mAudioDataBytesCapacity;
    }

    memcpy(fillBuffer->mAudioData, inInputData, inNumberBytes);
    fillBuffer->mAudioDataByteSize = inNumberBytes;

        // Hand the buffer to the queue; log rather than ignore a failure so a
        // stalled queue is diagnosable.
    OSStatus err = AudioQueueEnqueueBuffer(myData->audioQueue, fillBuffer, inNumberPackets, inPacketDescriptions);
    if (err) {
        NSLog(@"[MyPacketsProc] AudioQueueEnqueueBuffer failed: %d", (int)err);
    }
}

/**
 * Audio queue output callback.
 *
 * Called by the AudioQueue when it has finished playing inBuffer, which is
 * then free to be refilled. This implementation pulls the next audio frame
 * from the remote camera session (TUTK AV channel), records which queue
 * buffer should be refilled (fillBufferIndex), and feeds the received bytes
 * back into the AudioFileStream parser — whose MyPacketsProc callback then
 * copies the parsed packets into that buffer and re-enqueues it.
 *
 * NOTE(review): this blocks the queue's callback thread on network I/O and
 * usleep; it relies on frames arriving fast enough to keep playback fed.
 *
 * @param inClientData  A MyData* carrying session/queue/buffer state.
 * @param inAQ          The queue that finished the buffer (unused directly).
 * @param inBuffer      The just-drained buffer, free for reuse.
 */
void MyAudioQueueOutputCallback(	void*					inClientData,
                                AudioQueueRef			inAQ,
                                AudioQueueBufferRef		inBuffer)
{
        // this is called by the audio queue when it has finished decoding our data.
        // The buffer is now free to be reused.
    MyData* myData = (MyData*)inClientData;
    
    printf("进入回调函数");
    
    char buf[AUDIO_BUF_SIZE];
    unsigned int frameNo;
    int ret;
    FRAMEINFO_t frameInfo;
    
    // Loop until one frame is successfully parsed (break) or the session dies (return).
    while(true)
    {
        // Check how many audio frames are buffered on the AV channel.
        ret = avCheckAudioBuf(myData->channelIndex);
        if (ret < 0) return;
        if (ret < 3) // determined by audio frame rate
        {
            // Too few frames buffered — wait a bit and re-check to avoid underrun.
            usleep(120000);
            continue;
        }
        
        // Receive one audio frame; ret is the payload size in bytes (or an error code).
        ret = avRecvAudioData(myData->channelIndex, buf, AUDIO_BUF_SIZE, (char *)&frameInfo, sizeof(FRAMEINFO_t), &frameNo);
        
        if(ret == AV_ER_SESSION_CLOSE_BY_REMOTE)
        {
            NSLog(@"[AudioThreadWithDecoder] AV_ER_SESSION_CLOSE_BY_REMOTE");
            return;
        }
        else if(ret == AV_ER_REMOTE_TIMEOUT_DISCONNECT)
        {
            NSLog(@"[AudioThreadWithDecoder] AV_ER_REMOTE_TIMEOUT_DISCONNECT");
            return;
        }
        else if(ret == IOTC_ER_INVALID_SID)
        {
            NSLog(@"[AudioThreadWithDecoder] Session cant be used anymore");
            return;
        }
        else if (ret == AV_ER_LOSED_THIS_FRAME)
        {
            // Frame was dropped in transit — skip it and try the next one.
            continue;
        }
            // Decode the audio frame and play it.
        if(ret >=0){
            
            // Record which queue buffer just drained so MyPacketsProc refills it.
            // If inBuffer is somehow not one of ours, fillBufferIndex keeps its
            // previous value.
            int i ;
            for( i = 0; i < kNumAQBufs; i++)
            {
                if(inBuffer == myData->audioQueueBuffer[i])
                {
                    myData->fillBufferIndex = i;
                    break;
                }
            }
        
            // Feed the payload (minus a leading padding header of PADDINGBITS
            // bytes) to the parser; it synchronously calls MyPacketsProc.
            OSStatus err = AudioFileStreamParseBytes(myData->audioFileStream, ret-PADDINGBITS, buf+PADDINGBITS, 0);
            if (err ) {
                NSLog(@"解析数据包错误");
                return;
            }
            break;
        }
    }
}