//
//  SampleHandler.m
//  in-app-audio-recording
//
//  Created by ken on 2021/11/27.
//


#import "SampleHandler.hpp"
#import "utils.hpp"

@implementation SampleHandler

// ReplayKit broadcast upload extension: captures in-app audio, down-converts it
// to 8-bit unsigned mono PCM, and streams it out over a UDP "music" channel.
// Ivars (music_channel, converter, destinationFormat) are declared in SampleHandler.hpp.

- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.

    // Set up the UDP channel used to ship converted audio out of the extension.
    int udpSocket = socket(AF_INET, SOCK_DGRAM, 0);
    check_bool(udpSocket >= 0, "Failed to create UDP socket");
    int reuseFlag = 1;
    check_bool(setsockopt(udpSocket, SOL_SOCKET, SO_REUSEADDR, &reuseFlag, sizeof(reuseFlag)) == 0,
               "Failed to set SO_REUSEADDR");
    // NOTE(review): the Receiver and Channel are intentionally never deleted —
    // they must live for the entire broadcast, and the extension process dies
    // with the broadcast. Confirm Receiver takes ownership of the socket.
    Receiver *receiver = new Receiver(udpSocket);
    self->music_channel = new Channel(music, channel_unreliable, 1, udpSocket, [=](Packet *p) {
        return true; // inbound packets on the music channel are acknowledged and ignored
    });
    receiver->addChannel(music_channel);

    // Target format: 8-bit unsigned mono linear PCM @ 44.1 kHz
    // (1 byte per frame, 1 frame per packet).
    self->converter = nil; // AudioConverter path is unused — see processSampleBuffer:withType:
    self->destinationFormat.mBitsPerChannel = 8;
    self->destinationFormat.mChannelsPerFrame = 1;
    self->destinationFormat.mFramesPerPacket = 1;
    self->destinationFormat.mBytesPerFrame = 1;
    self->destinationFormat.mBytesPerPacket = 1;
    self->destinationFormat.mSampleRate = 44100;
    self->destinationFormat.mFormatID = kAudioFormatLinearPCM;
    self->destinationFormat.mFormatFlags = kLinearPCMFormatFlagIsPacked;
}

- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
    // Nothing to do: the channel simply goes idle while no samples arrive.
}

- (void)broadcastResumed {
    // User has requested to resume the broadcast. Samples delivery will resume.
    // Nothing to do: processSampleBuffer:withType: picks up where it left off.
}

- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    // No teardown: the extension process is terminated, reclaiming the socket
    // and the heap-allocated Receiver/Channel.
}

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    // Converts each app-audio buffer (16-bit big-endian signed PCM, interleaved)
    // to 8-bit unsigned mono and sends it over the music channel.
    // Video and microphone buffers are ignored.
    CMFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
    const AudioStreamBasicDescription *sourceFormat = CMAudioFormatDescriptionGetStreamBasicDescription(formatDesc);
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Video frames are not forwarded.
            break;
        case RPSampleBufferTypeAudioApp:
        {
            // NOTE(review): two earlier conversion strategies were abandoned:
            //  - AudioConverterConvertBuffer always reported an invalid input size;
            //  - byte-by-byte channel mixing failed because samples are signed.
            // Hence the manual swap/shift conversion below.
            CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
            CMBlockBufferRef contiguousBuffer = NULL;
            OSStatus status = CMBlockBufferCreateContiguous(NULL, blockBuffer, NULL, NULL, 0, 0, 0, &contiguousBuffer);
            check_bool(status == kCMBlockBufferNoErr && contiguousBuffer != NULL,
                       "Failed to create contiguous CMBlockBuffer");
            size_t contiguousLength = 0, totalLength = 0;
            char *sampleData = NULL;
            status = CMBlockBufferGetDataPointer(contiguousBuffer, 0, &contiguousLength, &totalLength, &sampleData);
            check_bool(status == kCMBlockBufferNoErr, "Failed to get CMBlockBuffer data pointer");
            check_bool(contiguousLength == totalLength, "CMBlock Buffer is still not continugous");

            // One output byte per source frame: all channels collapse to one,
            // and each 16-bit sample collapses to its high byte.
            unsigned long convertedLength = totalLength / sourceFormat->mChannelsPerFrame / (sourceFormat->mBitsPerChannel / 8);
            long numberOfFrames = CMSampleBufferGetNumSamples(sampleBuffer);
            // Sanity-check lengths BEFORE allocating so a failed check cannot
            // leak the conversion buffer (assumes 16-bit source, hence the * 2).
            check_bool(numberOfFrames == (long)convertedLength, "numberOfFrames is not convertedLength");
            check_bool((unsigned long)numberOfFrames * sourceFormat->mChannelsPerFrame * 2 == totalLength,
                       "Length calculation error");

            unsigned char *convertResult = new unsigned char[convertedLength];
            for (long frameIndex = 0; frameIndex < numberOfFrames; frameIndex++) {
                // Only channel 0 is forwarded (cross-channel averaging was
                // tried and disabled — see NOTE above).
                long readIndex = frameIndex * sourceFormat->mChannelsPerFrame;
                unsigned short rawBigEndian = ((unsigned short *)sampleData)[readIndex];
                // Big-endian -> host (little-endian) byte swap.
                unsigned short hostOrder = (unsigned short)((rawBigEndian >> 8) | (rawBigEndian << 8));
                // Signed 16-bit -> unsigned 16-bit (shift the zero point by 2^15),
                // then keep the high byte to get 8-bit unsigned PCM.
                unsigned short unsignedSample = (unsigned short)((short)hostOrder + 32768);
                convertResult[frameIndex] = (unsigned char)(unsignedSample >> 8);
            }
            CFRelease(contiguousBuffer);
            self->music_channel->send(convertResult, (unsigned int)convertedLength, 0);
            delete [] convertResult;
        }
            break;
        case RPSampleBufferTypeAudioMic:
            // Mic audio is not forwarded.
            break;

        default:
            break;
    }
}

@end
