//
//  TTVPcmPlayer.m
//  TouchTV
//
//  Created by 周启睿 on 2021/6/24.
//  Copyright © 2021 TouchTV. All rights reserved.
//

#import "TTVPcmPlayer.h"
#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#import <assert.h>

// Size in bytes of the scratch AudioBufferList allocation made in
// -initPlayer (64 KiB).
const uint32_t CONST_BUFFER_SIZE = 0x10000;

// RemoteIO bus numbers: bus 1 is the input (mic) element, bus 0 is the
// output (speaker) element.
#define INPUT_BUS 1
#define OUTPUT_BUS 0
// Consecutive render-callback underruns tolerated before playback
// auto-stops (see -cacheNotEnough).
#define kCacheMaxTryCount    1000

@interface TTVPcmPlayer ()

// Accumulated PCM bytes awaiting playback. Written by -addBufferData:
// and drained by the render callback under @synchronized(cacheBufferData).
@property (nonatomic,strong) NSMutableData *cacheBufferData;
// Read cursor: byte offset into cacheBufferData of the next unplayed byte.
@property (nonatomic,assign) NSInteger start;
// Sample rate (Hz) used to configure the output stream format.
@property (nonatomic,assign) NSInteger mSampleRate;

// Count of consecutive render-callback underruns; compared against
// kCacheMaxTryCount in -cacheNotEnough.
@property (nonatomic,assign) NSInteger cacheNotEnoughCount;



@end

@implementation TTVPcmPlayer
{
    AudioUnit audioUnit;
    AudioBufferList *buffList;
}


/// Render callback invoked by the AudioUnit on its realtime thread.
/// Copies the next chunk of cached PCM bytes into the output buffer,
/// zero-fills whatever portion of the buffer the cache cannot satisfy,
/// and reports underruns to the player.
static OSStatus PlayCallback(void *inRefCon,
                             AudioUnitRenderActionFlags *ioActionFlags,
                             const AudioTimeStamp *inTimeStamp,
                             UInt32 inBusNumber,
                             UInt32 inNumberFrames,
                             AudioBufferList *ioData) {
    TTVPcmPlayer *player = (__bridge TTVPcmPlayer *)inRefCon;

    NSMutableData *cacheData = player.cacheBufferData;
    @synchronized(cacheData) {
        AudioBuffer audioBuffer = ioData->mBuffers[0];
        NSUInteger requested = audioBuffer.mDataByteSize;
        NSUInteger start = player.start;
        // Guard against NSUInteger underflow if the cache was truncated
        // while the cursor was already past the new end.
        NSUInteger available = (start < cacheData.length) ? cacheData.length - start : 0;
        NSUInteger len = MIN(requested, available);
        BOOL isCacheNotEnough = (len < requested);

        if (len > 0) {
            [cacheData getBytes:audioBuffer.mData range:NSMakeRange(start, len)];
            player.start = start + len;
        }

        if (isCacheNotEnough) {
            // Bug fix: previously the WHOLE buffer was memset to zero here,
            // wiping the partial chunk copied above (whose bytes were also
            // consumed from the cache). Zero only the unfilled tail, and do
            // it even when the cache is completely empty so stale data is
            // never played.
            memset((char *)audioBuffer.mData + len, 0, requested - len);
        }

        if (isCacheNotEnough && cacheData.length > 0) {
            [player cacheNotEnough]; // counts retries; stops past the limit
        }
        else{
            player.cacheNotEnoughCount = 0;
        }
    }
    return noErr;
}

/// Designated entry point: primes the cache with `data` and builds the
/// audio unit configured for `sample` Hz, 16-bit mono PCM.
/// @param data   Initial PCM bytes to queue for playback (may be empty).
/// @param sample Sample rate in Hz for the output stream format.
- (id)initWithData:(NSData *)data sampleRate:(long)sample{
    // Bug fix: the original never called [super init] nor checked self,
    // violating the ObjC initializer contract.
    self = [super init];
    if (self) {
        _mSampleRate = sample;
        [self clear];
        [self cleanBufferData];
        [self addBufferData:data];
        [self initPlayer];
    }
    return self;
}

/// Appends freshly decoded PCM bytes to the playback cache.
/// Safe to call while the render callback is draining the cache — both
/// sides lock on the cache object itself.
- (void)addBufferData:(NSData *)data{
    NSMutableData *cache = self.cacheBufferData;
    @synchronized (cache) {
        [cache appendData:data];
    }
}

/// Empties the PCM cache and rewinds the read cursor.
- (void)cleanBufferData{
    // Bug fix: this mutation was unsynchronized while the render callback
    // reads the cache under @synchronized(cacheBufferData) on the audio
    // thread, and the cursor was never reset — leaving `start` pointing
    // past the truncated data. (The old resetBytesInRange: before
    // setLength:0 was dead work and is dropped.)
    @synchronized (self.cacheBufferData) {
        [self.cacheBufferData setLength:0];
        self.start = 0;
    }
}

/// Starts the output audio unit.
/// @return YES if the unit started; NO if AudioOutputUnitStart failed.
- (BOOL)play{
    // Bug fix: the start status was ignored and success reported
    // unconditionally.
    OSStatus status = AudioOutputUnitStart(audioUnit);
    if (status != noErr) {
        NSLog(@"AudioOutputUnitStart error with status:%d", (int)status);
        return false;
    }
    self.isPlaying = true;
    return true;
}

/// Called from the render callback when the cache cannot fill a full
/// buffer; stops playback after kCacheMaxTryCount consecutive underruns.
- (void)cacheNotEnough{
    // Bug fix: was `=+ 1` (assignment of positive 1, not increment), so
    // the counter never grew past 1 and the auto-stop could never fire.
    self.cacheNotEnoughCount += 1;
    if (self.cacheNotEnoughCount > kCacheMaxTryCount) {
        [self stop];
    }
}

/// Halts the output unit and releases the scratch buffer list allocated
/// in -initPlayer.
- (void)stop{
    AudioOutputUnitStop(audioUnit);
    if (buffList) {
        void *sampleBytes = buffList->mBuffers[0].mData;
        if (sampleBytes) {
            free(sampleBytes);
            buffList->mBuffers[0].mData = NULL;
        }
        free(buffList);
        buffList = NULL;
    }
    self.isPlaying = false;
}

/// Returns the current playback position in seconds.
/// NOTE(review): not implemented — always returns 0. TODO: could be
/// derived from self.start and the configured sample rate / bytes per
/// frame if callers need a real value; confirm before relying on this.
- (double)getCurrentTime {
    Float64 timeInterval = 0;
    return timeInterval;
}

/// Configures the audio session, creates the RemoteIO audio unit,
/// describes the 16-bit mono PCM stream format, and installs the render
/// callback. Must run before -play.
- (void)initPlayer {

    NSError *error = nil;
    OSStatus status = noErr;

    // Configure the shared session for playback. Check the BOOL return,
    // not the error pointer.
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    if (![audioSession setCategory:AVAudioSessionCategoryPlayback error:&error]) {
        NSLog(@"AVAudioSession setCategory error: %@", error);
    }

    // Bug fix: this whole block was commented out, so `audioUnit` was
    // never created and every AudioUnitSetProperty / AudioUnitInitialize
    // call below operated on a NULL unit and failed.
    AudioComponentDescription audioDesc;
    audioDesc.componentType = kAudioUnitType_Output;
    audioDesc.componentSubType = kAudioUnitSubType_RemoteIO;
    audioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
    audioDesc.componentFlags = 0;
    audioDesc.componentFlagsMask = 0;

    AudioComponent inputComponent = AudioComponentFindNext(NULL, &audioDesc);
    AudioComponentInstanceNew(inputComponent, &audioUnit);

    // Scratch buffer, one mono channel. Freed in -stop / -clear.
    buffList = (AudioBufferList *)malloc(sizeof(AudioBufferList));
    buffList->mNumberBuffers = 1;
    buffList->mBuffers[0].mNumberChannels = 1;
    buffList->mBuffers[0].mDataByteSize = CONST_BUFFER_SIZE;
    buffList->mBuffers[0].mData = malloc(CONST_BUFFER_SIZE);

    // Enable output on the I/O unit (dropped the pointless `if (flag)`
    // guard — flag is the constant 1).
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  OUTPUT_BUS,
                                  &flag,
                                  sizeof(flag));
    if (status != noErr) {
        NSLog(@"AudioUnitSetProperty error with status:%d", (int)status);
    }

    // Stream format: signed 16-bit integer, packed, mono PCM.
    AudioStreamBasicDescription outputFormat;
    memset(&outputFormat, 0, sizeof(outputFormat));
    outputFormat.mSampleRate       = self.mSampleRate;      // sample rate (Hz)
    outputFormat.mFormatID         = kAudioFormatLinearPCM; // PCM format
    // kAudioFormatFlagIsPacked added: 16 bits fully fill the 2-byte frame,
    // and Core Audio expects the packed flag for this layout.
    outputFormat.mFormatFlags      = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    outputFormat.mFramesPerPacket  = 1; // one frame per packet
    outputFormat.mChannelsPerFrame = 1; // mono
    outputFormat.mBytesPerFrame    = 2; // channels * (bits / 8)
    outputFormat.mBytesPerPacket   = 2; // bytesPerFrame * framesPerPacket
    outputFormat.mBitsPerChannel   = 16; // bit depth
    [self printAudioStreamBasicDescription:outputFormat];

    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  OUTPUT_BUS,
                                  &outputFormat,
                                  sizeof(outputFormat));
    if (status != noErr) {
        NSLog(@"AudioUnitSetProperty error with status:%d", (int)status);
    }

    // Install the render callback that pulls PCM out of the cache.
    AURenderCallbackStruct playCallback;
    playCallback.inputProc = PlayCallback;
    playCallback.inputProcRefCon = (__bridge void *)self;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Input,
                                  OUTPUT_BUS,
                                  &playCallback,
                                  sizeof(playCallback));
    if (status != noErr) {
        NSLog(@"AudioUnitSetProperty (render callback) error with status:%d", (int)status);
    }

    OSStatus result = AudioUnitInitialize(audioUnit);
    NSLog(@"result %d", (int)result);
}

/// Tears down the audio unit and releases all buffers and cache state.
/// Called from -initWithData:sampleRate: and -dealloc.
- (void)clear{
    self.cacheNotEnoughCount = 0;
    AudioOutputUnitStop(audioUnit);
    AudioUnitUninitialize(audioUnit);
    AudioComponentInstanceDispose(audioUnit);
    audioUnit = NULL; // avoid double-dispose on a subsequent clear/dealloc
    if (buffList != NULL) {
        // Bug fix: only the AudioBufferList struct was freed here; the
        // CONST_BUFFER_SIZE sample buffer it owned was leaked (-stop
        // already freed both — this now matches it).
        if (buffList->mBuffers[0].mData) {
            free(buffList->mBuffers[0].mData);
            buffList->mBuffers[0].mData = NULL;
        }
        free(buffList);
        buffList = NULL;
    }
    [self cleanBufferData];
}

/// Stops playback and frees the audio unit plus all heap buffers when
/// the player is destroyed.
- (void)dealloc {
    [self clear];
}

/// Dumps every field of an AudioStreamBasicDescription to stdout for
/// debugging the configured stream format.
- (void)printAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd {
    // Render the four-char format ID as a printable, NUL-terminated string.
    char fourCC[5] = {0};
    UInt32 bigEndianFormatID = CFSwapInt32HostToBig(asbd.mFormatID);
    memcpy(fourCC, &bigEndianFormatID, 4);
    printf("Sample Rate:         %10.0f\n",  asbd.mSampleRate);
    printf("Format ID:           %10s\n",    fourCC);
    printf("Format Flags:        %10X\n",    (unsigned int)asbd.mFormatFlags);
    printf("Bytes per Packet:    %10d\n",    (unsigned int)asbd.mBytesPerPacket);
    printf("Frames per Packet:   %10d\n",    (unsigned int)asbd.mFramesPerPacket);
    printf("Bytes per Frame:     %10d\n",    (unsigned int)asbd.mBytesPerFrame);
    printf("Channels per Frame:  %10d\n",    (unsigned int)asbd.mChannelsPerFrame);
    printf("Bits per Channel:    %10d\n",    (unsigned int)asbd.mBitsPerChannel);
    printf("\n");
}

/// Lazily-created backing store for the PCM byte cache.
- (NSMutableData*)cacheBufferData{
    if (_cacheBufferData == nil) {
        _cacheBufferData = [[NSMutableData alloc] init];
    }
    return _cacheBufferData;
}
@end
