//
//  AGDCAudioFrame.cpp
//  AgoraDemo
//
//  Created by tzx on 16/7/19.
//  Copyright © 2016 apple. All rights reserved.
//

#import <AVFoundation/AVFoundation.h>

#include "AGDCAudioFrameObserver.h"


#include "mediaProcess.h"


// Audio-unit bus layout: an I/O unit's element 0 is output (speaker),
// element 1 is input (microphone).
#define kMixerInputBusCount 2
#define kOutputBus 0
#define kInputBus 1

// Debug dump target for captured PCM (see commented fwrite in onRecordAudioFrame).
FILE *g_pcmFile = NULL;

// Debug source file for injecting PCM (see commented fread in onRecordAudioFrame).
FILE *g_pInFile = NULL;

// When non-zero, onRecordAudioFrame pulls capture data from the local
// audio pool instead of using the SDK's own microphone capture.
#define __AUDIO__POOL_      1


// Set to 1 to enable verbose audio-path logging via OT_AUDIO_DEBUG.
#define OT_ENABLE_AUDIO_DEBUG 0

#if OT_ENABLE_AUDIO_DEBUG
#define OT_AUDIO_DEBUG(fmt, ...) NSLog(fmt, ##__VA_ARGS__)
#else
#define OT_AUDIO_DEBUG(fmt, ...)
#endif

// Preferred hardware I/O buffer duration in seconds (10 ms).
static double kPreferredIOBufferDuration = 0.01;



/// Queues one captured PCM chunk into the observer's audio pool.
/// Called from the Core Audio input callback (handleInputBuffer);
/// onRecordAudioFrame later drains the pool on the SDK's thread.
/// @param inAudioFrame  Raw PCM bytes from the audio unit render.
/// @param frameSize     Size of inAudioFrame in bytes.
void CSouceAudioFrameObserver::pushAudioFrame(unsigned char *inAudioFrame, int frameSize)
{
    // Delegates to the global pool helper from mediaProcess.h.
    ::pushAudioFrame(mAudioPool, inAudioFrame, frameSize);
}


/// Constructs the observer and allocates the intermediate audio pool
/// (2048-byte frames, up to 100 frames) used to hand microphone data
/// from the Core Audio input callback to the SDK record callback.
/// On allocation failure mAudioPool stays NULL; onRecordAudioFrame()
/// guards against that.
CSouceAudioFrameObserver::CSouceAudioFrameObserver()//:m_callback(nullptr)
{
    // Pre-null so a failed createAudioPool cannot leave the pointer
    // uninitialized (the original ignored the return value entirely).
    mAudioPool = NULL;
    bool bRet = ::createAudioPool(&mAudioPool, 2048, 100);
    if (!bRet) {
        NSLog(@"CSouceAudioFrameObserver: createAudioPool failed");
    }
}

// Frame counter / threshold for the (currently commented-out) delayed
// PCM dump in onRecordAudioFrame.
int nBegin  = 0;
int nMax = 100;

// Spare debug file handle and flag; not referenced in this chunk.
FILE *fp = NULL;
bool bFlag = false;

/// Record-side SDK hook: replaces the frame's capture data with PCM
/// pulled from the local audio pool (fed by handleInputBuffer).
/// @param audioFrame  SDK-owned frame whose buffer is overwritten in place.
/// @return true when a pool frame was copied in, false when the pool is
///         missing or empty (original returned 0/false in those paths too).
bool CSouceAudioFrameObserver::onRecordAudioFrame(AudioFrame& audioFrame)
{
#if  __AUDIO__POOL_
    if (NULL == mAudioPool) {
        return false;
    }

    unsigned char *pData = NULL;
    int frameSize = 0;
    if (!::getAudioFrameBegin(mAudioPool, &pData, &frameSize)) {
        // Pool empty — leave the SDK frame untouched.
        return false;
    }

    // Clamp the copy so a pool frame can never overrun the SDK buffer.
    // NOTE(review): assumes capacity == samples * channels * bytesPerSample
    // per the Agora AudioFrame layout — confirm against the SDK headers.
    int capacity = audioFrame.samples * audioFrame.channels * audioFrame.bytesPerSample;
    int copySize = (frameSize < capacity) ? frameSize : capacity;
    if (pData != NULL && copySize > 0) {
        memcpy(audioFrame.buffer, pData, copySize);
    }

    ::getAudioFrameEnd(mAudioPool);
#endif

    return true;
}


/// Playback-side SDK hook: forwards the far-end mixed PCM to the
/// registered consumer callback (flag `true` marks it as playback data).
/// Always returns true so the SDK keeps delivering frames.
bool CSouceAudioFrameObserver::onPlaybackAudioFrame(AudioFrame& audioFrame)
{
    if (!m_callback) {
        return true;
    }

    uint8_t *pcm = (uint8_t *)audioFrame.buffer;
    int byteCount = audioFrame.samples * audioFrame.bytesPerSample;
    m_callback(pcm, byteCount, audioFrame.samples, true, m_context);

    return true;
}

/// Per-user pre-mix playback hook; intentionally a no-op — this observer
/// only consumes the post-mix stream. Returning true keeps the frame.
bool CSouceAudioFrameObserver::onPlaybackAudioFrameBeforeMixing(unsigned int uid, AudioFrame& audioFrame)
{
    return true;
}

// Core Audio input callback for the microphone bus: renders the freshly
// captured PCM out of the audio unit and pushes it into the observer's
// audio pool. inRefCon carries the CSouceAudioFrameObserver*.
static OSStatus handleInputBuffer(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData)
{
    CSouceAudioFrameObserver *observer = static_cast<CSouceAudioFrameObserver *>(inRefCon);
    if (observer == NULL) {
        return noErr;
    }

    // mData == NULL / mDataByteSize == 0 asks AudioUnitRender to supply
    // its own internal buffer for the rendered samples.
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;
    bufferList.mBuffers[0].mDataByteSize = 0;
    bufferList.mBuffers[0].mData = NULL;

    OSStatus status = AudioUnitRender(observer->audioUnit(),
                                      ioActionFlags,
                                      inTimeStamp,
                                      inBusNumber,
                                      inNumberFrames,
                                      &bufferList);
    if (status == noErr) {
        observer->pushAudioFrame((uint8_t *)bufferList.mBuffers[0].mData,
                                 bufferList.mBuffers[0].mDataByteSize);
    }

    return status;
}


/// Forwards a raw PCM chunk to the registered consumer callback.
/// No-op when no callback has been installed.
void CSouceAudioFrameObserver::callCallback(uint8_t* Data, int len, int inNumberFrames )
{
    if (!m_callback) {
        return;
    }
    m_callback(Data, len, inNumberFrames, true, m_context);
}


/// Configures the shared AVAudioSession for low-latency mono capture:
/// video-chat mode, preferred sample rate and 10 ms IO buffer, and a
/// play-and-record category that mixes with other audio. The previous
/// session settings are saved first so they can be restored later.
/// Original discarded every NSError; failures are now logged.
void CSouceAudioFrameObserver::setupAudioSession()
{
    AVAudioSession *mySession = [AVAudioSession sharedInstance];

    // Remember the caller's session configuration for later restoration.
    _previousAVAudioSessionCategory = mySession.category;
    avAudioSessionMode = mySession.mode;
    avAudioSessionPreffSampleRate = mySession.preferredSampleRate;
    avAudioSessionChannels = mySession.inputNumberOfChannels;

    NSError *error = nil;
    // VideoChat mode enables voice processing tuned for A/V calls.
    if (![mySession setMode:AVAudioSessionModeVideoChat error:&error]) {
        NSLog(@"setupAudioSession: setMode failed: %@", error);
    }

    error = nil;
    if (![mySession setPreferredSampleRate:kSampleRate error:&error]) {
        NSLog(@"setupAudioSession: setPreferredSampleRate failed: %@", error);
    }
    [mySession setPreferredInputNumberOfChannels:1 error:nil];
    [mySession setPreferredIOBufferDuration:kPreferredIOBufferDuration
                                      error:nil];

    NSUInteger audioOptions = AVAudioSessionCategoryOptionMixWithOthers;
#if !(TARGET_OS_TV)
    audioOptions |= AVAudioSessionCategoryOptionAllowBluetooth;
    audioOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker;
    error = nil;
    if (![mySession setCategory:AVAudioSessionCategoryPlayAndRecord
                    withOptions:audioOptions
                          error:&error]) {
        NSLog(@"setupAudioSession: setCategory failed: %@", error);
    }
#else
    // tvOS has no microphone input; playback-only category.
    [mySession setCategory:AVAudioSessionCategoryPlayback
               withOptions:audioOptions
                     error:nil];
#endif

    error = nil;
    if (![mySession setActive:YES error:&error]) {
        NSLog(@"setupAudioSession: setActive failed: %@", error);
    }
}


/// Creates and configures a VoiceProcessing I/O audio unit for mono
/// 16-bit signed PCM capture at kSampleRate, installs handleInputBuffer
/// as the input callback, starts the unit once to verify it works, then
/// stops it (capture is controlled via startAudio()/stopAudio()).
/// Fixes vs. original: dead #if 0 branch and pointless __block self
/// alias removed; component lookup, instantiation, property setting and
/// initialization results are now checked; the 0.01 literal is replaced
/// by kPreferredIOBufferDuration for consistency with setupAudioSession.
/// @return TRUE on success, FALSE when the unit cannot be set up or started.
BOOL CSouceAudioFrameObserver::setupAudioUnitEX()
{
    AVAudioSession *session = [AVAudioSession sharedInstance];

    NSUInteger audioOptions = AVAudioSessionCategoryOptionMixWithOthers;
    audioOptions |= AVAudioSessionCategoryOptionAllowBluetooth;
    audioOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker;
    [session setCategory:AVAudioSessionCategoryPlayAndRecord
             withOptions:audioOptions
                   error:nil];

    [session setMode:AVAudioSessionModeVideoChat error:nil];
    [session setActive:YES error:nil];
    [session setPreferredIOBufferDuration:kPreferredIOBufferDuration error:nil];

    // VoiceProcessingIO = RemoteIO + echo cancellation / AGC.
    AudioComponentDescription acd;
    acd.componentType = kAudioUnitType_Output;
    acd.componentSubType = kAudioUnitSubType_VoiceProcessingIO;//kAudioUnitSubType_RemoteIO;
    acd.componentManufacturer = kAudioUnitManufacturer_Apple;
    acd.componentFlags = 0;
    acd.componentFlagsMask = 0;

    mComponent = AudioComponentFindNext(NULL, &acd);
    if (mComponent == NULL) {
        NSLog(@"AudioComponentFindNext failed");
        return FALSE;
    }

    OSStatus status = AudioComponentInstanceNew(mComponent, &mAudioUnit);
    if (status != noErr || !mAudioUnit) {
        NSLog(@"AudioComponentInstanceNew failed (%d)", (int)status);
        return FALSE;
    }

    // Enable capture on the input element (bus 1 = microphone side).
    UInt32 flagOne = 1;
    status = AudioUnitSetProperty(mAudioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &flagOne,
                                  sizeof(flagOne));
    if (status != noErr) {
        NSLog(@"EnableIO on input bus failed (%d)", (int)status);
    }

    // Mono, interleaved, packed 16-bit signed native-endian PCM.
    AudioStreamBasicDescription desc = {0};
    desc.mSampleRate = kSampleRate;
    desc.mFormatID = kAudioFormatLinearPCM;
    desc.mFormatFlags = (kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked);
    desc.mChannelsPerFrame = 1;
    desc.mFramesPerPacket = 1;
    desc.mBitsPerChannel = 16;
    desc.mBytesPerFrame = desc.mBitsPerChannel / 8 * desc.mChannelsPerFrame;
    desc.mBytesPerPacket = desc.mBytesPerFrame * desc.mFramesPerPacket;

    // Output scope of the input element is the format data leaves the
    // microphone side in; the input callback fires per captured buffer.
    AURenderCallbackStruct cb;
    cb.inputProcRefCon = this;
    cb.inputProc = handleInputBuffer;
    status = AudioUnitSetProperty(mAudioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &desc,
                                  sizeof(desc));
    if (status != noErr) {
        NSLog(@"StreamFormat set failed (%d)", (int)status);
    }
    status = AudioUnitSetProperty(mAudioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  kInputBus,
                                  &cb,
                                  sizeof(cb));
    if (status != noErr) {
        NSLog(@"SetInputCallback failed (%d)", (int)status);
    }

    status = AudioUnitInitialize(mAudioUnit);
    if (status != noErr) {
        NSLog(@"AudioUnitInitialize failed (%d)", (int)status);
        return FALSE;
    }

    OSStatus ret = AudioOutputUnitStart(mAudioUnit);
    if (ret != noErr) {
        NSLog(@"Failed to start microphone!");
        return FALSE;
    }
    // Start/stop round-trip verifies the unit; capture begins later via startAudio().
    stopAudio();

    return TRUE;
}

/// Suspends microphone capture. The audio unit stays configured and can
/// be resumed with startAudio().
void CSouceAudioFrameObserver::stopAudio() {
    NSLog(@"stopAudio");
    AudioOutputUnitStop(mAudioUnit);
}

/// Resumes microphone capture on the already-configured audio unit
/// (set up by setupAudioUnitEX()).
void CSouceAudioFrameObserver::startAudio() {
    NSLog(@"startAudio");
    AudioOutputUnitStart(mAudioUnit);
}






