//
//  FFMPEGClinet_Audio.m
//  IPCamera
//
//  Created by 宣佚 on 15/6/16.
//  Copyright (c) 2015年 Andon Health Co,.Ltd. All rights reserved.
//

#import "FFMPEGClinet_Audio.h"
#import "VideoControlCenterViewController.h"
#import "AVFRAMEINFO.h"
#import "AVAPIs.h"
#import "IOTCAPIs.h"

#include "MPEG2AAC_ADTSTYPE.mm"


@implementation FFMPEGClinet_Audio


/// Returns a copy of the shared audio-state struct.
/// NOTE: returned by value — caller mutations do not affect this
/// instance; use -getMyDataRef to mutate the shared state.
- (MyData)getMyData {
    return myData;
}
/// Returns a pointer to the instance's audio-state struct so callers
/// (e.g. C callbacks) can read/write it in place.
- (MyData *)getMyDataRef {
    return &myData;
}
/// Shared singleton accessor; creation is thread-safe via dispatch_once.
+ (FFMPEGClinet_Audio *)sharedInstance {
    static FFMPEGClinet_Audio *sharedClient = nil;
    static dispatch_once_t token;

    dispatch_once(&token, ^{
        sharedClient = [[FFMPEGClinet_Audio alloc] init];
    });

    return sharedClient;
}

#pragma mark - FFmpeg processing
/// Initializes the audio playback pipeline for the given stream format.
///
/// AAC streams are routed through an AudioFileStream parser (ADTS framing);
/// PCM and G.711 A-law streams play directly through an AudioQueue whose
/// buffers are refilled by AudioPlayerAQInputCallback.
///
/// @param fmt The incoming stream's audio format.
/// @param ctl The owning VideoControlCenterViewController; passed (unretained)
///            as the AudioQueue callback's user-data pointer.
/// @return YES if the pipeline was fully set up, NO on any failure.
- (BOOL)initFFmpegAudioStreamWithFormat:(ENUM_AUDIO_FORMAT)fmt ViewController:(id)ctl
{
    VideoControlCenterViewController *viewCtl = (VideoControlCenterViewController *)ctl;

    self.synlock = [[NSLock alloc] init];

    // AAC is special-cased: the ADTS parser's callbacks drive playback
    // instead of the AudioQueue configured below.
    if (fmt == AUDIO_FORMAT_AAC)
    {
        NSLog(@"处理AAC初始化");

        myData.channelIndex = viewCtl.avIndex;

        OSStatus err = AudioFileStreamOpen(&myData, MyPropertyListenerProc, MyPacketsProc,
                                           kAudioFileAAC_ADTSType, &myData.audioFileStream);
        if (err) {
            PRINTERROR("AudioFileStreamOpen");
            return NO;
        }
        return YES;
    }

    // PCM and G.711 A-law: describe the stream for the AudioQueue.
    switch (fmt) {
        case AUDIO_FORMAT_PCM:
            audioDescription.mFormatID = kAudioFormatLinearPCM;
            audioDescription.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
            audioDescription.mSampleRate = 8000;        // 8 kHz sample rate
            audioDescription.mChannelsPerFrame = 1;     // mono
            audioDescription.mBitsPerChannel = 16;      // 16-bit samples
            audioDescription.mFramesPerPacket = 1;      // one frame per packet
            audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel / 8) * audioDescription.mChannelsPerFrame;
            audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame * audioDescription.mFramesPerPacket;
            break;
        case AUDIO_FORMAT_G711A:
            audioDescription.mFormatID = kAudioFormatALaw;
            audioDescription.mSampleRate = 8000;        // 8 kHz sample rate
            audioDescription.mChannelsPerFrame = 1;     // mono
            audioDescription.mFramesPerPacket = 1;      // one frame per packet
            break;
        default:
            NSLog(@"未知音频格式-[%lu]", (unsigned long)fmt);
            return NO;
    }

    // Create an output queue serviced by the player's internal thread.
    // FIX: the OSStatus was previously ignored — a failure here left
    // _audioQueue NULL while the method still reported success.
    OSStatus status = AudioQueueNewOutput(&audioDescription, AudioPlayerAQInputCallback,
                                          (__bridge void *)(ctl), NULL, NULL, 0, &_audioQueue);
    if (status != noErr) {
        NSLog(@"AudioQueueNewOutput failed, status = %d", (int)status);
        return NO;
    }

    // Allocate the playback buffers. AUDIO_BUF_SIZE must be at least as
    // large as the biggest chunk ever copied into one buffer.
    for (int i = 0; i < QUEUE_BUFFER_SIZE; i++)
    {
        OSStatus result = AudioQueueAllocateBuffer(self.audioQueue, AUDIO_BUF_SIZE, &audioQueueBuffers[i]);
        NSLog(@"AudioQueueAllocateBuffer i = %d,result = %d", i, (int)result);
        if (result != noErr) {
            // FIX: previously ignored — playing with a NULL buffer crashes later.
            return NO;
        }
    }

    // Buffers are ready: start the queue so the callback begins pulling audio.
    status = AudioQueueStart(self.audioQueue, NULL);
    if (status != noErr) {
        NSLog(@"AudioQueueStart failed, status = %d", (int)status);
        return NO;
    }
    return YES;
}
/// AudioQueue output callback: invoked when `outQB` has been consumed and
/// must be refilled. Pulls one audio frame from the TUTK AV channel
/// (viewController.avIndex) and hands it to readPCMAndPlay: for re-enqueue.
/// `input` is the VideoControlCenterViewController passed (unretained) to
/// AudioQueueNewOutput.
/// NOTE(review): blocks inside usleep(120000) while fewer than 3 frames are
/// buffered — runs on the AudioQueue's internal thread, not the main thread.
static void AudioPlayerAQInputCallback(void *input, AudioQueueRef outQ, AudioQueueBufferRef outQB)
{
    NSLog(@"AudioPlayerAQInputCallback - load more audio data");
    
    VideoControlCenterViewController * viewController = (__bridge VideoControlCenterViewController *)input;

    char buf[AUDIO_BUF_SIZE];
    unsigned int frameNo;
    int ret;
    
    FRAMEINFO_t frameInfo;
    
    while(true)
    {
        // Poll the remote buffer level; negative means the channel is unusable.
        ret = avCheckAudioBuf(viewController.avIndex);
        if (ret < 0) return;
        if (ret < 3) // determined by audio frame rate
        {
            // Not enough frames queued yet — wait 120 ms and poll again.
            usleep(120000);
            continue;
        }
        
        // Receive one audio frame (payload into buf, metadata into frameInfo).
        ret = avRecvAudioData(viewController.avIndex, buf, AUDIO_BUF_SIZE, (char *)&frameInfo, sizeof(FRAMEINFO_t), &frameNo);
        
        // Fatal session errors: stop refilling; the queue will starve and stop.
        if(ret == AV_ER_SESSION_CLOSE_BY_REMOTE)
        {
            NSLog(@"[AudioThreadWithDecoder] AV_ER_SESSION_CLOSE_BY_REMOTE");
            return;
        }
        else if(ret == AV_ER_REMOTE_TIMEOUT_DISCONNECT)
        {
            NSLog(@"[AudioThreadWithDecoder] AV_ER_REMOTE_TIMEOUT_DISCONNECT");
            return;
        }
        else if(ret == IOTC_ER_INVALID_SID)
        {
            NSLog(@"[AudioThreadWithDecoder] Session cant be used anymore");
            return;
        }
        else if (ret == AV_ER_LOSED_THIS_FRAME)
        {
            // Dropped frame — skip it and try the next one.
            continue;
        }

        // ret >= 0 is the received payload size in bytes. Strip the leading
        // PADDINGBITS header bytes and enqueue the PCM for playback; one
        // frame per callback invocation.
        // NOTE(review): any other negative ret falls through and re-loops —
        // presumably avCheckAudioBuf then fails; confirm against TUTK docs.
        if(ret >=0){
            [viewController.affClient readPCMAndPlay:outQ buffer:outQB WithDataBuf:buf+PADDINGBITS  AndSize:ret-PADDINGBITS];
            break;
        }
    }
}

/// Plays one chunk of decoded PCM through the queue buffer at `index`.
///
/// @param buf   Pointer to the PCM bytes to play.
/// @param size  Number of bytes in `buf`.
/// @param index Index into the pre-allocated audioQueueBuffers array.
- (void)ReadPCMAndPlay:(char *)buf BufSize:(int)size BufIndex:(int)index {
    // FIX: the index was previously used unchecked, so an out-of-range
    // value read past the audioQueueBuffers array.
    if (buf == NULL || index < 0 || index >= QUEUE_BUFFER_SIZE) {
        NSLog(@"ReadPCMAndPlay: invalid args (buf=%p, index=%d)", buf, index);
        return;
    }
    // 加一步解码 (decode step happens upstream; this just enqueues)
    [self readPCMAndPlay:self.audioQueue buffer:audioQueueBuffers[index] WithDataBuf:buf AndSize:size];
}

/// Copies `size` bytes of PCM into the queue buffer and enqueues it for
/// playback. Serialized with self.synlock so concurrent callers cannot
/// interleave the copy/enqueue sequence.
///
/// @param outQ  The playback AudioQueue.
/// @param outQB The queue buffer to fill (allocated with AUDIO_BUF_SIZE capacity).
/// @param buf   Source PCM bytes.
/// @param size  Number of bytes to copy from `buf`.
- (void)readPCMAndPlay:(AudioQueueRef)outQ buffer:(AudioQueueBufferRef)outQB WithDataBuf:(char *)buf AndSize:(int)size
{
    // Reject unusable input up front (a negative size would wrap when
    // assigned to the unsigned mAudioDataByteSize).
    if (outQ == NULL || outQB == NULL || buf == NULL || size <= 0) {
        return;
    }

    [self.synlock lock];

    // FIX: clamp to the buffer's allocated capacity — the original memcpy
    // wrote `size` bytes unconditionally and could overflow mAudioData.
    UInt32 byteCount = (UInt32)size;
    if (byteCount > outQB->mAudioDataCapacity) {
        NSLog(@"readPCMAndPlay: truncating %d bytes to capacity %u",
              size, (unsigned int)outQB->mAudioDataCapacity);
        byteCount = outQB->mAudioDataCapacity;
    }

    outQB->mAudioDataByteSize = byteCount;
    memcpy(outQB->mAudioData, buf, byteCount);

    // FIX: surface enqueue failures instead of silently dropping audio.
    OSStatus status = AudioQueueEnqueueBuffer(outQ, outQB, 0, NULL);
    if (status != noErr) {
        NSLog(@"AudioQueueEnqueueBuffer failed, status = %d", (int)status);
    }

    [self.synlock unlock];
}
@end
