//
//  AudioSession.m
//  OneToOne
//
//  Created by ByteDance on 2023/2/20.
//

#import "AudioSession.h"
#import <libavformat/avformat.h>
#import <AVFoundation/AVFoundation.h>


// Class extension: private capture-pipeline and writer state. The class acts
// as its own sample-buffer delegate to receive captured audio frames.
@interface AudioSession() <AVCaptureAudioDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *audioSession;      // capture graph; lazily built and started in its getter
@property (nonatomic, strong) AVCaptureDevice *audioDevice;        // default microphone
@property (nonatomic, strong) AVCaptureDeviceInput *audioInput;    // mic wrapped as a capture input
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioOutput; // delivers sample buffers to the delegate queue

// Writer path is currently disabled (see commented-out code in -init and
// the delegate callback); kept for the planned AAC/MP4 recording feature.
@property (nonatomic, strong) AVAssetWriter *assetWriter;
@property (nonatomic, strong) AVAssetWriterInput *assetAudioInput;
@end

@implementation AudioSession

#pragma mark - lifecycle

/// Designated initializer. Touching the -audioSession getter lazily builds
/// the capture pipeline (mic input + data output) and starts capturing.
- (instancetype)init {
    self = [super init];
    if (self) {
        //[self assetWriter];
        [self audioSession];
    }
    return self;
}

#pragma mark - AVCaptureAudioDataOutputSampleBufferDelegate

/// Called on the serial audio queue for every captured audio sample buffer.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Forward captured audio to the asset writer once recording is enabled.
//    if (self.assetWriter.status != AVAssetWriterStatusWriting) {
//        return;
//    }
//    [self.assetAudioInput appendSampleBuffer:sampleBuffer];

    // Obtain a pointer to the raw audio frame bytes. The block buffer may be
    // NULL (e.g. the sample buffer carries no contiguous data), so guard
    // before use instead of passing NULL into CMBlockBufferGetDataPointer.
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    if (blockBuffer == NULL) {
        return;
    }
    size_t lengthAtOffset = 0;
    size_t totalLength = 0;
    char *dataPointer = NULL;
    // Check the OSStatus: on failure dataPointer/totalLength are not valid.
    OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, &lengthAtOffset, &totalLength, &dataPointer);
    if (status != kCMBlockBufferNoErr || dataPointer == NULL) {
        return;
    }

    // Wrap the audio frame bytes in NSData when a consumer needs them.
    //NSData *data = [NSData dataWithBytes:dataPointer length:totalLength];
}

#pragma mark - getters

/// Lazily builds the capture session, wires up the mic input and the data
/// output, and starts it running.
/// NOTE(review): -startRunning is a blocking call; Apple recommends invoking
/// it off the main thread. Left synchronous here to preserve the existing
/// startup ordering — confirm callers never construct this on the main thread.
- (AVCaptureSession *)audioSession {
    if (!_audioSession) {
        _audioSession = [[AVCaptureSession alloc] init];
        // self.audioInput can be nil if device creation failed (see -audioInput);
        // guard so we don't attach a nil input.
        if (self.audioInput && [_audioSession canAddInput:self.audioInput]) {
            [_audioSession addInput:self.audioInput];
        }
        if ([_audioSession canAddOutput:self.audioOutput]) {
            [_audioSession addOutput:self.audioOutput];
        }
        [_audioSession startRunning];
    }
    return _audioSession;
}

/// Lazily wraps the default microphone as a capture input.
/// Returns nil (and logs) if the input could not be created, e.g. when
/// microphone permission is denied or no audio device exists.
- (AVCaptureDeviceInput *)audioInput {
    if (!_audioInput) {
        NSError *error = nil;
        _audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.audioDevice error:&error];
        if (!_audioInput) {
            NSLog(@"[AudioSession] failed to create audio capture input: %@", error);
        }
    }
    return _audioInput;
}

/// Lazily creates the audio data output, delivering sample buffers to this
/// object on a dedicated serial queue (AVCaptureAudioDataOutput requires a
/// serial queue to preserve buffer ordering).
- (AVCaptureAudioDataOutput *)audioOutput {
    if (!_audioOutput) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        dispatch_queue_t audioQueue = dispatch_queue_create("com.example.audioQueue", DISPATCH_QUEUE_SERIAL);
        [_audioOutput setSampleBufferDelegate:self queue:audioQueue];
    }
    return _audioOutput;
}

/// Lazily resolves the system default audio capture device (microphone).
- (AVCaptureDevice *)audioDevice {
    if (!_audioDevice) {
        _audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    }
    return _audioDevice;
}

/// Lazily creates the MP4 asset writer and attaches the AAC audio input.
/// Currently unused (disabled in -init).
/// FIXME: [NSURL URLWithString:@""] is not a valid file URL — AVAssetWriter
/// creation will fail. Supply a real destination via
/// [NSURL fileURLWithPath:...] before enabling recording.
- (AVAssetWriter *)assetWriter {
    if (!_assetWriter) {
        NSError *error = nil;
        _assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL URLWithString:@""] fileType:AVFileTypeMPEG4 error:&error];
        if (!_assetWriter) {
            NSLog(@"[AudioSession] failed to create asset writer: %@", error);
            return nil;
        }
        // Guard before adding: adding an incompatible input raises an exception.
        if ([_assetWriter canAddInput:self.assetAudioInput]) {
            [_assetWriter addInput:self.assetAudioInput];
        }
    }
    return _assetWriter;
}

/// Lazily creates the writer input encoding mono 44.1 kHz AAC at 64 kbps
/// per channel. Real-time flag is set because samples arrive from a live
/// capture session.
- (AVAssetWriterInput *)assetAudioInput {
    if (!_assetAudioInput) {
        NSDictionary *audioCompressionSettings = @{
            AVEncoderBitRatePerChannelKey: @(64000),
            AVFormatIDKey: @(kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey: @(1),
            AVSampleRateKey: @(44100),
        };
        _assetAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
        _assetAudioInput.expectsMediaDataInRealTime = YES;
    }
    return _assetAudioInput;
}
@end
