//
//  AudioResampleViewController.m
//  FFMpegKitDemo
//
//  Created by ilongge on 2023/8/11.
//

#import "AudioResampleViewController.h"

@interface AudioResampleViewController ()
{
    // Demuxing / decoding state (FFmpeg).
    AVFormatContext *_formatContext;      // container context for the input file
    AVCodecContext *_audioCodecContext;   // decoder context for the audio stream
    const struct AVCodec *_audioCodec;    // decoder selected from the stream's codec id
    int _audioStreamIndex;                // index of the best audio stream, -1 if none
    AVFrame *_audioFrame;                 // reusable frame receiving decoded samples
    AVStream *_audioStream;               // the audio stream being decoded

    // Target (output) parameters the resampler converts to.
    int _dstAudioChannels;
    int _dstAudioSampleRate;
    enum AVSampleFormat _dstAudioSampleFmt;
    
    SwrContext *_swrContext;              // libswresample conversion context
    AVChannelLayout _outLayout;           // output channel layout matching _dstAudioChannels
    
    NSString *_audioFile;                 // name of the bundled input file
    
    dispatch_queue_t _decode_queue;       // serial queue the decode/resample loop runs on
}
// Console-style log view and progress bar, connected in the storyboard.
@property (weak, nonatomic) IBOutlet UITextView *consoleText;
@property (weak, nonatomic) IBOutlet UIProgressView *progressView;

@end

@implementation AudioResampleViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    self.title = @"音频重采样";
    _audioFile = @"罗刹海市_44100_2.aac";
    self.progressView.progress = 0.00;
    _decode_queue = dispatch_queue_create("com.ilongge.queue.resample", DISPATCH_QUEUE_SERIAL);

    // Prepare the decoder, the target audio parameters, and the resampler.
    [self initCodec];
    [self initDstParameter];
    [self createSwrContext];
}

/// Chooses the resample target parameters (channel count, sample rate,
/// sample format), derives the matching output channel layout, and logs them.
- (void)initDstParameter
{
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    _dstAudioChannels = 2;//(int)audioSession.outputNumberOfChannels;
    _dstAudioSampleRate = audioSession.sampleRate;
    _dstAudioSampleFmt = AV_SAMPLE_FMT_FLTP;
    // Fix: log the format actually configured (the old message hard-coded
    // "AV_SAMPLE_FMT_S16" although FLTP is used) and terminate the line.
    printf("channels: %d sampleRate: %d sampleFmt: %s\n", _dstAudioChannels, _dstAudioSampleRate, av_get_sample_fmt_name(_dstAudioSampleFmt));
    // Output channel layout
    if (_dstAudioChannels == 1) {
        _outLayout = (AVChannelLayout)AV_CHANNEL_LAYOUT_MONO;
    }
    else if (_dstAudioChannels == 2) {
        _outLayout = (AVChannelLayout)AV_CHANNEL_LAYOUT_STEREO;
    }
    else if (_dstAudioChannels == 3) {
        _outLayout = (AVChannelLayout)AV_CHANNEL_LAYOUT_2POINT1;
    }
    else if (_dstAudioChannels == 6) {
        _outLayout = (AVChannelLayout)AV_CHANNEL_LAYOUT_5POINT1;
    }
    else{
        // Fix: initialize the whole layout (order + mask), not just
        // nb_channels, so swr_alloc_set_opts2 receives a valid layout.
        av_channel_layout_default(&_outLayout, _dstAudioChannels);
    }
    {
        [self printMessage:@"\n根据当前设备支持的参数进行重采样" breakline:YES];
        [self printMessage:@"输出:" breakline:YES];
        [self printMessage:[NSString stringWithFormat:@"\tChannel: %d", _dstAudioChannels] breakline:YES];
        [self printMessage:[NSString stringWithFormat:@"\tSampleRate: %d", _dstAudioSampleRate] breakline:YES];
        // Fix: report the configured format instead of AV_SAMPLE_FMT_S16.
        [self printMessage:[NSString stringWithFormat:@"\tSampleFmt: %d", _dstAudioSampleFmt] breakline:YES];
    }
}

/// Opens the bundled audio file, finds the best audio stream, and opens a
/// decoder for it. On success _formatContext / _audioStream /
/// _audioCodecContext / _audioFrame are ready for the decode loop.
- (void)initCodec
{
    NSString *pathString = [[NSBundle mainBundle] pathForResource:_audioFile ofType:nil];
    if (pathString == nil) {
        NSLog(@"file not exist");
        return;
    }
    // Open the audio file and read the container header.
    int ret = avformat_open_input(&_formatContext, pathString.UTF8String, NULL, NULL);
    if (ret != 0) {
        NSLog(@"avformat_open_input : %d", ret);
        return;
    }
    ret = avformat_find_stream_info(_formatContext, NULL);
    if (ret != 0) {
        NSLog(@"avformat_find_stream_info : %d", ret);
        return;
    }
    _audioStreamIndex = -1;
    // Locate the audio stream.
    _audioStreamIndex = av_find_best_stream(_formatContext, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
    if (_audioStreamIndex < 0) {
        NSLog(@"av_find_best_stream : %d", _audioStreamIndex);
        return;
    }
    // Grab the audio stream.
    _audioStream = _formatContext->streams[_audioStreamIndex];
    // Find a decoder for it.
    _audioCodec = avcodec_find_decoder(_audioStream->codecpar->codec_id);
    if (_audioCodec == NULL) {
        NSLog(@"avcodec_find_decoder failed for codec_id %d", _audioStream->codecpar->codec_id);
        return;
    }
    // Allocate the decoder context.
    _audioCodecContext = avcodec_alloc_context3(_audioCodec);
    if (_audioCodecContext == NULL) {
        NSLog(@"avcodec_alloc_context3 failed");
        return;
    }
    // Fix: copy the stream parameters (sample rate, channel layout,
    // extradata, …) into the context. Without this the decoder is opened
    // without the stream's configuration and decoding can fail or misbehave.
    ret = avcodec_parameters_to_context(_audioCodecContext, _audioStream->codecpar);
    if (ret < 0) {
        NSLog(@"avcodec_parameters_to_context : %d", ret);
        return;
    }
    // Open the decoder.
    ret = avcodec_open2(_audioCodecContext, _audioCodec, NULL);
    if (ret != 0) {
        NSLog(@"avcodec_open2 : %d", ret);
        return;
    }
    // Allocate the reusable frame for decoded samples.
    _audioFrame = av_frame_alloc();
    {
        [self printMessage:@"输入文件:" breakline:YES];
        [self printMessage:pathString breakline:YES];
        NSString *info_string = [FFMpegKitTool av_dump_format:_formatContext streamIndex:0 url:pathString is_output:NO];
        [self printMessage:info_string breakline:YES];
    }
}

/// Builds the SwrContext converting from the input stream's format
/// (layout / sample format / rate read from codecpar) to the target
/// parameters chosen in -initDstParameter.
- (void)createSwrContext
{
    if (_formatContext == NULL || _audioStream == NULL) {
        NSLog(@"createSwrContext Fail");
        return;
    }
    // Input channel layout.
    AVChannelLayout inLayout = _audioStream->codecpar->ch_layout;
    // Input sample format (codecpar->format is an int; cast makes the intent explicit).
    enum AVSampleFormat sample_fmt = (enum AVSampleFormat)_audioStream->codecpar->format;
    // Input sample rate.
    int sample_rate = _audioStream->codecpar->sample_rate;
    // Allocate and configure the resample context.
    _swrContext = NULL;
    int ret = swr_alloc_set_opts2(&_swrContext,
                                  &_outLayout,
                                  _dstAudioSampleFmt,
                                  _dstAudioSampleRate,
                                  &inLayout,
                                  sample_fmt,
                                  sample_rate,
                                  0,
                                  NULL);
    if (ret != 0) {
        NSLog(@"swr_alloc_set_opts2 : %d", ret);
        swr_free(&_swrContext);
        // Fix: bail out so the success message below is not printed on failure.
        return;
    }
    {
        [self printMessage:@"\n创建SwrContext" breakline:YES];
    }
}

- (IBAction)startResampleAction:(UIButton *)sender {
    // NOTE(review): -audioResample dispatches its work asynchronously onto
    // _decode_queue, so the button is re-enabled immediately below — before
    // the job finishes. A second tap can therefore start a concurrent run.
    // Consider re-enabling only from a completion callback.
    sender.userInteractionEnabled = NO;
    [self audioResample];
    sender.userInteractionEnabled = YES;
}

/// Decodes the whole input file on _decode_queue, resamples every frame to
/// the target parameters, and appends the interleaved PCM bytes to
/// Documents/<file>_Resample.pcm, updating the progress bar as it goes.
- (void)audioResample
{
    if (_formatContext == NULL || _audioStream == NULL || _audioCodecContext == NULL) {
        NSLog(@"audioResample Fail");
        return;
    }
    dispatch_async(_decode_queue, ^{
        NSString *pathString = [NSString stringWithFormat:@"%@/Documents/%@_Resample.pcm", NSHomeDirectory(), self->_audioFile];
        [[NSFileManager defaultManager] createFileAtPath:pathString contents:nil attributes:nil];
        NSFileHandle *file = [NSFileHandle fileHandleForWritingAtPath:pathString];
        NSInteger frameCount = 1;
        // Estimated total number of frames, used only to drive the progress
        // bar. duration is in AV_TIME_BASE (microsecond) units.
        // Fix: guard frame_size == 0 (possible for some codecs) to avoid a
        // division by zero.
        int frame_size = self->_audioStream->codecpar->frame_size;
        float totle_length = frame_size > 0
            ? self->_formatContext->duration * self->_audioStream->codecpar->sample_rate / frame_size / 1000.0 / 1000.0
            : 1.0;
        {
            [self printMessage:@"输出文件:" breakline:YES];
            [self printMessage:pathString breakline:YES];
            [self printMessage:@"开始解码" breakline:YES];
            [self printMessage:@"开始重采样" breakline:YES];
            [self printMessage:@"" breakline:YES];
        }
        // Fix: av_packet_alloc() returns a properly initialized packet;
        // av_malloc(sizeof(AVPacket)) left its fields uninitialized.
        AVPacket *packet = av_packet_alloc();
        while (true) {
            int ret = av_read_frame(self->_formatContext, packet);
            if (ret != 0) {
                av_packet_unref(packet);
                break;
            }
            frameCount ++;
            if (packet->stream_index != self->_audioStreamIndex) {
                av_packet_unref(packet);
                continue;
            }
            ret = avcodec_send_packet(self->_audioCodecContext, packet);
            // Fix: the packet's payload was previously never unreferenced on
            // the audio path, leaking every audio packet's buffer.
            av_packet_unref(packet);
            while (ret >= 0) {
                ret = avcodec_receive_frame(self->_audioCodecContext, self->_audioFrame);
                if (ret < 0) {
                    // AVERROR(EAGAIN): decoder needs more input;
                    // AVERROR_EOF / other errors: stop draining this packet.
                    break;
                }
                {
                    dispatch_sync(dispatch_get_main_queue(), ^{
                        float progress = frameCount / totle_length;
                        float value = progress - self.progressView.progress;
                        // Throttle UI updates to ~1% increments.
                        if (value > 0.01) {
                            self.progressView.progress = progress;
                            [self printMessage:@"." breakline:NO];
                        }
                    });
                }
                AVFrame *frame = [self audioConvert:self->_audioFrame
                                            chanels:self->_dstAudioChannels
                                          sampleFmt:self->_dstAudioSampleFmt
                                         sampleRate:self->_dstAudioSampleRate];
                // Fix: audioConvert returns NULL on failure; the old code
                // dereferenced it unconditionally.
                if (frame == NULL) {
                    continue;
                }
                NSMutableData * channelData = [NSMutableData data];
                switch (self->_dstAudioSampleFmt) {
                        // Packed (interleaved) formats: all channels share data[0].
                    case AV_SAMPLE_FMT_S16:
                    case AV_SAMPLE_FMT_S32:
                    case AV_SAMPLE_FMT_FLT:
                    case AV_SAMPLE_FMT_DBL:
                    case AV_SAMPLE_FMT_S64:
                    {
                        NSInteger length = frame->nb_samples * self->_dstAudioChannels * av_get_bytes_per_sample(self->_dstAudioSampleFmt);
                        [channelData appendData:[NSData dataWithBytes:frame->data[0] length:length]];
                    }
                        break;
                        // Planar formats: one plane per channel; interleave by hand.
                    case AV_SAMPLE_FMT_S16P:
                    case AV_SAMPLE_FMT_S32P:
                    case AV_SAMPLE_FMT_FLTP:
                    case AV_SAMPLE_FMT_DBLP:
                    case AV_SAMPLE_FMT_S64P:
                    {
                        // samples
                        for (NSInteger sample_index = 0; sample_index < frame->nb_samples; sample_index++) {
                            NSMutableData *sampleData = [NSMutableData data];
                            // channels
                            for (NSInteger channel_index = 0; channel_index < self->_dstAudioChannels; channel_index++) {
                                NSInteger length = av_get_bytes_per_sample(self->_dstAudioSampleFmt);
                                const void * offset = frame->data[channel_index] + sample_index * length;
                                NSData *pcmData = [NSData dataWithBytes:offset length:length];
                                [sampleData appendData:pcmData];
                            }
                            [channelData appendData:sampleData];
                        }
                    }
                        break;
                    default:
                        break;
                }
                [file seekToEndOfFile];
                [file writeData:channelData];
                [file synchronizeFile];
                // Release the converted frame once written.
                av_frame_free(&frame);
            }
        }
        {
            [self printMessage:@"解码完成" breakline:YES];
            [self printMessage:@"重采样完成" breakline:YES];
            [self printMessage:@"共" breakline:YES];
            [self printMessage:@(frameCount).description breakline:NO];
            [self printMessage:@"frames" breakline:NO];
        }
        av_seek_frame(self->_formatContext, self->_audioStreamIndex, 0, 0);
        [file closeFile];
        av_packet_free(&packet);
        av_frame_free(&self->_audioFrame);
    });
}

/// Resamples inFrame to the destination parameters via _swrContext.
/// Returns a newly allocated frame the caller must free with av_frame_free,
/// or NULL when the conversion fails.
- (AVFrame *)audioConvert:(AVFrame *)inFrame
                  chanels:(int)dstChannels
                sampleFmt:(enum AVSampleFormat)dstSampleFmt
               sampleRate:(int)dstSampleRate
{
    // Allocate the destination frame and describe the desired output;
    // swr_convert_frame sizes and fills the buffers itself.
    AVFrame *outFrame = av_frame_alloc();
    outFrame->format = dstSampleFmt;
    outFrame->sample_rate = dstSampleRate;
    outFrame->ch_layout = _outLayout;

    int result = swr_convert_frame(_swrContext, outFrame, inFrame);
    if (result >= 0) {
        return outFrame;
    }
    NSLog(@"swr_convert_frame %d", result);
    av_frame_free(&outFrame);
    return NULL;
}

/// Appends message to the on-screen console (and mirrors it to stdout),
/// prefixing a newline or a space when the console already has content,
/// then scrolls the text view to keep the newest output visible.
- (void)printMessage:(NSString *)message breakline:(BOOL)breakline
{
    // UITextView may only be touched on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        NSMutableString *console = [[NSMutableString alloc] init];
        NSString *existing = self.consoleText.text;
        if (existing.length) {
            [console appendString:existing];
            // Separator between entries: newline or single space.
            NSString *separator = breakline ? @"\n" : @" ";
            [console appendString:separator];
            printf("%s", separator.UTF8String);
        }
        if (message.length) {
            [console appendString:message];
            printf("%s", message.UTF8String);
        }
        self.consoleText.text = console;
        // Scroll to the bottom when the content has grown past the
        // current offset.
        CGFloat bottomOffset = self.consoleText.contentSize.height - self.consoleText.frame.size.height;
        if (bottomOffset > 0 && bottomOffset > self.consoleText.contentOffset.y) {
            [self.consoleText setContentOffset:CGPointMake(0, bottomOffset) animated:NO];
        }
    });
}
- (void)dealloc
{
    // Each FFmpeg *_free/close helper also NULLs the pointer, so these
    // guards are safe even if some resources were already released.
    // Fix: _audioFrame was never freed here and leaked whenever the
    // resample loop (which frees it at the end) never ran.
    if (_audioFrame != NULL) {
        av_frame_free(&_audioFrame);
    }
    if (_formatContext != NULL) {
        avformat_close_input(&_formatContext);
    }
    
    if (_audioCodecContext != NULL) {
        avcodec_free_context(&_audioCodecContext);
    }
    if (_swrContext != NULL) {
        swr_free(&_swrContext);
    }
    printf("%s\n", __func__);
}
@end
