//
//  Mp4Recorder.m
//  TRTC-API-Example-OC
//
//  Created by rykeryin on 2021/8/23.
//

#import "TRTCRemoteRecorder.h"

#define WEAK_SELF __weak typeof(self) weakSelf = self;
#define STRONG_SELF __strong typeof (weakSelf) strongSelf = weakSelf;


// Internal lifecycle states of the MP4 recorder.
// (The "Stoping"/"Stoped" misspellings are kept for source compatibility.)
typedef NS_ENUM(NSInteger, Mp4RecorderStatus) {
    Mp4RecorderStatusWaitStarting, // Waiting to start: audio/video parameters not yet complete
    Mp4RecorderStatusStarting, // Starting up
    Mp4RecorderStatusRecording, // Actively recording
    Mp4RecorderStatusStoping, // Stopping (finalizing the file)
    Mp4RecorderStatusStoped // Fully stopped
};

// Parameters that QoS adjusts in real time. When any of these change while
// recording, the current MP4 writer must be torn down and restarted (see
// -refreshAudioParams:/-refreshVideoParams:), because the writer inputs are
// configured for a fixed format. A value of 0 means "not yet observed".
@interface TRTCQosConfig : NSObject
// Video frame width in pixels.
@property (assign, nonatomic) int32_t width;
// Video frame height in pixels.
@property (assign, nonatomic) int32_t height;
// Audio sample rate in Hz.
@property (assign, nonatomic) int32_t sampleRate;
// Number of audio channels.
@property (assign, nonatomic) int32_t channels;
@end
@implementation TRTCQosConfig
@end


@interface TRTCRemoteRecorder ()

// Recording configuration supplied by the caller (file path, bitrates, fps, gop).
@property (nonatomic, strong) TRTCRecordConfig *config;
// Event callback delegate (weak to avoid a retain cycle with the owner).
@property (nonatomic, weak) id<TRTCRemoteRecorderDelegate> delegate;

// AVFoundation writer and its inputs; rebuilt on every (re)start.
@property (strong, nonatomic) AVAssetWriter * writer ;
@property (strong, nonatomic) AVAssetWriterInput * audioInput ;
@property (strong, nonatomic) AVAssetWriterInput * videoInput ;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
// Current recorder lifecycle state (atomic: touched from the caller's thread
// and from record_queue).
@property (assign, atomic) Mp4RecorderStatus status;
// Latest QoS-adjustable parameters (resolution / sample rate / channels).
@property (nonatomic, strong) TRTCQosConfig *qosConfig;
// Output file sequence number.
@property (assign, nonatomic) int fileIndex;
// Presentation timestamp of the most recent video frame.
@property (assign, nonatomic) CMTime lastVideoPts;
// Presentation timestamp of the most recent audio frame.
@property (assign, nonatomic) CMTime lastAudioPts;
//@property (assign, nonatomic) TRTCVideoFrame *lastVFrame;
// Audio timestamp correction: video PTS minus audio PTS, in seconds
// (see -fixPtsOffset).
@property (assign, nonatomic) float audioOffset;
// Serial queue on which all writer mutations run.
@property (nonatomic, strong) dispatch_queue_t record_queue;

@end

@implementation TRTCRemoteRecorder

// Designated initializer: prepares the serial recording queue and marks the
// recorder as stopped until -startRecord is called.
- (instancetype)init {
    self = [super init];
    if (self != nil) {
        // Direct ivar access is safe here (plain auto-synthesized properties).
        _record_queue = dispatch_queue_create("record queue", DISPATCH_QUEUE_SERIAL);
        _status = Mp4RecorderStatusStoped;
    }
    return self;
}

#pragma mark - Public

// Stores the recording configuration.
// NOTE(review): identical to the auto-synthesized setter; kept only to make
// the public mutation point explicit.
- (void)setConfig:(TRTCRecordConfig *)config {
    _config = config;
}

// Stores the event callback delegate (the property is weak, so no ownership
// is taken). NOTE(review): identical to the auto-synthesized setter.
- (void)setDelegate:(id<TRTCRemoteRecorderDelegate>)delegate {
    _delegate = delegate;
}

// Begins a recording session. Writer creation is deferred until both audio
// and video parameters have been observed from incoming frames (see
// -triggerRecord:), so this only resets state to "waiting to start".
- (void)startRecord {
    self.fileIndex = 0;
    self.status = Mp4RecorderStatusWaitStarting;
    // Fresh QoS config: all parameters start at 0 and are filled in by the
    // first pushed frames.
    // NOTE(review): lastVideoPts/lastAudioPts/audioOffset are NOT reset here —
    // confirm that is intentional for back-to-back sessions.
    self.qosConfig = [[TRTCQosConfig alloc] init];
}
// Stops recording. The heavy lifting happens on the serial recording queue:
// the writer is finalized synchronously there (sync:YES) and this is treated
// as the final segment (last:YES), so completion events fire.
- (void)stopRecord {
    __weak typeof(self) weakSelf = self;
    dispatch_async(self.record_queue, ^{
        __strong typeof(weakSelf) strongSelf = weakSelf;
        [strongSelf stopRecordSync:YES last:YES];
    });
}

#pragma mark - Private

// YES once every QoS parameter (audio and video) has been observed at least
// once; the writer cannot be created before this holds.
- (BOOL)isQosParamInited {
    TRTCQosConfig *qos = self.qosConfig;
    if (qos.sampleRate == 0) {
        return NO;
    }
    if (qos.channels == 0) {
        return NO;
    }
    if (qos.width == 0) {
        return NO;
    }
    return qos.height != 0;
}

// Records the latest audio parameters. If a previously-complete parameter set
// changed, MP4 writing must be restarted immediately so mismatched samples
// are not appended to the open file.
- (void)refreshAudioParams:(int)sampleRate channels:(int)channels {
    BOOL wasComplete = [self isQosParamInited];
    BOOL changed = (self.qosConfig.sampleRate != sampleRate)
                || (self.qosConfig.channels != channels);
    self.qosConfig.sampleRate = sampleRate;
    self.qosConfig.channels = channels;
    if (!wasComplete) {
        // Parameters were still incomplete before this update; nothing to do.
        return;
    }
    [self triggerRecord:(wasComplete && changed)];
}

// Records the latest video parameters. If a previously-complete parameter set
// changed, MP4 writing must be restarted immediately so mismatched frames are
// not appended to the open file.
- (void)refreshVideoParams:(int)width height:(int)height {
    BOOL wasComplete = [self isQosParamInited];
    BOOL changed = (self.qosConfig.width != width)
                || (self.qosConfig.height != height);
    self.qosConfig.width = width;
    self.qosConfig.height = height;
    if (!wasComplete) {
        // Parameters were still incomplete before this update; nothing to do.
        return;
    }
    [self triggerRecord:(wasComplete && changed)];
}

// Drives the writer lifecycle from incoming frame parameters.
// - First call after -startRecord (state == WaitStarting): starts the initial
//   writer.
// - Later calls with adjust == YES (a QoS parameter changed): restarts the
//   writer so the new format is honored.
- (void)triggerRecord:(BOOL)adjust {
    BOOL needReStart = NO;
    BOOL firstRecord = NO;

    if (self.status == Mp4RecorderStatusWaitStarting) {
        self.status = Mp4RecorderStatusStarting;
        // First start: parameters are now complete, begin recording.
        NSLog(@"首次启动录制");
        needReStart = YES;
        firstRecord = YES;
    }
    if (self.status != Mp4RecorderStatusWaitStarting && adjust) {
        NSLog(@"调整参数后");
        // Not the first start: a parameter changed, so the writer must be
        // restarted.
        needReStart = YES;
        // Move to the stopping state immediately so no further frames are
        // appended to the (now mis-configured) open file.
        self.status = Mp4RecorderStatusStoping;
    }
    if (needReStart) {
        // First start or restart: (re)create the writer on the serial queue.
        // NOTE(review): the synchronous stop before restart is commented out
        // upstream; on restart the previous writer is abandoned rather than
        // finalized — confirm that is intended.
        WEAK_SELF
        dispatch_async(self.record_queue, ^{
            STRONG_SELF
            if (!strongSelf) {
                return;
            }
            // BUGFIX: this block previously read `self.qosConfig`, capturing
            // `self` strongly and defeating the weak/strong dance.
            [strongSelf startRecord:firstRecord config:strongSelf.qosConfig];
        });
    }
}

// The custom-render callbacks drift apart over long runs; capture the current
// video-minus-audio PTS gap (in seconds) so later audio timestamps can be
// corrected against it.
- (void)fixPtsOffset {
    Float64 videoSeconds = CMTimeGetSeconds(self.lastVideoPts);
    Float64 audioSeconds = CMTimeGetSeconds(self.lastAudioPts);
    self.audioOffset = videoSeconds - audioSeconds;
}

// Creates and starts an AVAssetWriter. Precondition: both audio and video
// parameters are known (see -isQosParamInited).
// first:     YES on the initial start; controls start success/failure events.
// qosConfig: the resolution / sample-rate / channel parameters to encode with.
- (void)startRecord:(BOOL)first
             config:(TRTCQosConfig *)qosConfig {
    NSLog(@"=========启动中");
    // Mark state: starting.
    self.status = Mp4RecorderStatusStarting;
    NSError *error = nil;
    // BUGFIX: splitting the whole path on "." and taking token[0] broke any
    // path whose directories contain a dot; swap only the path extension.
    NSString *fPath = [[self.config.filePath stringByDeletingPathExtension]
                       stringByAppendingPathExtension:@"mp4"];
    self.writer = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:fPath] fileType:AVFileTypeMPEG4 error:&error];
    // Check the returned object, not only the error out-parameter.
    if (self.writer == nil || error != nil) {
        [self sendEvent:TRTCRemoteRecordStartFailed detail:@"AVAssetWriter init failed"];
        return;
    }
    // Attach the audio input.
    if (![self addAudioInput:qosConfig.sampleRate channels:qosConfig.channels]) {
        [self sendEvent:TRTCRemoteRecordStartFailed detail:@"添加音频Input失败"];
        return;
    }
    // Attach the video input.
    if (![self addVideoInput:qosConfig.width height:qosConfig.height]) {
        [self sendEvent:TRTCRemoteRecordStartFailed detail:@"添加视频Input失败"];
        return;
    }
    // Start writing; the session starts at the latest known video PTS so the
    // first appended frame lands at time zero of the file.
    if (self.writer.status == AVAssetWriterStatusUnknown) {
        BOOL success = [self.writer startWriting];
        [self.writer startSessionAtSourceTime:self.lastVideoPts];
        if (first) {
            if (success) {
                [self sendEvent:TRTCRemoteRecordStartSuccess detail:@"启动录制成功"];
            }
            else {
                // BUGFIX: the failure path previously reported the
                // TRTCRemoteRecordStartSuccess event with a failure message.
                [self sendEvent:TRTCRemoteRecordStartFailed detail:@"启动录制失败"];
            }
        }
    }
    // Re-measure audio/video drift for the new session.
    [self fixPtsOffset];
    self.status = Mp4RecorderStatusRecording;
    NSLog(@"开始录制视频：%@", self.config.filePath);
}

// Finalizes the current MP4 file.
// sync: when YES, blocks the caller until the writer has finished (via a
//       dispatch group); when NO, finishes asynchronously.
// last: when YES this is the final segment — notify the delegate that the
//       whole recording completed and stopped successfully.
- (void)stopRecordSync:(BOOL)sync last:(BOOL)last {
    WEAK_SELF
    if (sync) {
        dispatch_group_t group = dispatch_group_create();
        dispatch_group_enter(group);
        if (self.writer.status == AVAssetWriterStatusWriting) {
            [self.writer finishWritingWithCompletionHandler:^{
                STRONG_SELF
                // Leave the group first so the waiter can never hang, even if
                // the recorder has been deallocated meanwhile.
                dispatch_group_leave(group);
                if (!strongSelf) {
                    return;
                }
                strongSelf.status = Mp4RecorderStatusStoped;
                if (last) {
                    // BUGFIX: previously messaged `self` here, capturing it
                    // strongly and defeating the weak/strong dance.
                    [strongSelf sendEvent:TRTCRemoteRecordComplete detail:@"录制结束"];
                    [strongSelf sendEvent:TRTCRemoteRecordStopSuccess detail:@"停止录制成功"];
                }
            }];
        }
        else {
            // Nothing to finalize; release the waiter immediately.
            dispatch_group_leave(group);
        }
        dispatch_group_wait(group, DISPATCH_TIME_FOREVER);
    }
    else {
        if (self.writer.status == AVAssetWriterStatusWriting) {
            [self.writer finishWritingWithCompletionHandler:^{
                STRONG_SELF
                if (!strongSelf) {
                    return;
                }
                strongSelf.status = Mp4RecorderStatusStoped;
                if (last) {
                    // BUGFIX: previously messaged `self` (strong capture).
                    [strongSelf sendEvent:TRTCRemoteRecordComplete detail:@"录制结束"];
                    [strongSelf sendEvent:TRTCRemoteRecordStopSuccess detail:@"停止录制成功"];
                }
            }];
        }
    }
}


/**
    Appends one decoded video frame to the MP4 file. Also drives parameter
    detection: the first frames establish the video size before recording can
    begin.
 */
- (void)pushVideoFrame:(TRTCVideoFrame *)vFrame {
    if (!vFrame.pixelBuffer) {
        // Nothing to write without a pixel buffer.
        return;
    }

    // Feed the QoS tracker; a resolution change restarts the writer.
    [self refreshVideoParams:vFrame.width height:vFrame.height];

    self.lastVideoPts = CMTimeMakeWithSeconds(vFrame.timestamp / 1000.0, 30);

    if (self.status != Mp4RecorderStatusRecording) {
        return;
    }
    // Keep the pixel buffer alive until the asynchronous append has run.
    CFRetain(vFrame.pixelBuffer);
    // Append on the serial recording queue.
    WEAK_SELF
    dispatch_async(self.record_queue, ^{
        STRONG_SELF
        // All three conditions are harmlessly NO when strongSelf is nil.
        if (strongSelf.writer.status == AVAssetWriterStatusWriting &&
            strongSelf.videoInput.readyForMoreMediaData &&
            strongSelf.status == Mp4RecorderStatusRecording) {
            CMTime pts = CMTimeMakeWithSeconds(vFrame.timestamp / 1000.0, 30);
            BOOL success = [strongSelf.videoAdaptor appendPixelBuffer:vFrame.pixelBuffer withPresentationTime:pts];
            if (!success) {
                // BUGFIX: the failure path previously messaged `self`,
                // capturing it strongly despite the weak/strong dance.
                strongSelf.status = Mp4RecorderStatusStoped;
                [strongSelf sendEvent:TRTCRemoteRecordVideoEncodeError detail:@"视频编码失败"];
            }
        }
        // Balance the CFRetain above even when the frame was skipped.
        CFRelease(vFrame.pixelBuffer);
    });
}
/**
    Appends one captured audio frame to the MP4 file. Tracks the latest audio
    PTS, feeds the QoS tracker, and patches the frame timestamp before handing
    the write to the recording queue.
 */
- (void)pushAudioFrame:(TRTCAudioFrame *)aFrame {
    if (!aFrame.data) {
        // Nothing to write without PCM data.
        return;
    }
    // The SDK may report a zero timestamp; only track non-zero values.
    if ((int)aFrame.timestamp != 0) {
        self.lastAudioPts = CMTimeMakeWithSeconds(aFrame.timestamp / 1000.0, (int32_t)aFrame.sampleRate);
    }

    // Feed the QoS tracker; a sample-rate/channel change restarts the writer.
    [self refreshAudioParams:(int)aFrame.sampleRate channels:aFrame.channels];

    if (self.status != Mp4RecorderStatusRecording) {
        return;
    }
    // fix audio timestamp is 0: fall back to the last known audio PTS.
    if (aFrame.timestamp == 0) {
        aFrame.timestamp = CMTimeGetSeconds(self.lastAudioPts) * 1000;
    }
    // Apply the audio/video drift correction measured by -fixPtsOffset.
    // BUGFIX: audioOffset is in seconds (a CMTimeGetSeconds difference) while
    // timestamp is in milliseconds (it is divided by 1000.0 wherever it is
    // used); convert to milliseconds before adding.
    // NOTE(review): this mutates the caller's frame object — confirm the SDK
    // does not reuse it after this call.
    aFrame.timestamp = aFrame.timestamp + self.audioOffset * 1000;

    WEAK_SELF
    dispatch_async(self.record_queue, ^{
        STRONG_SELF
        [strongSelf pushAudioFrameSync:aFrame];
    });
}

// Builds a CMSampleBuffer around the frame's raw PCM (packed, signed 16-bit,
// interleaved) and appends it to the audio input. Runs on the recording queue.
- (void)pushAudioFrameSync:(TRTCAudioFrame *)aFrame {
    int channels = aFrame.channels;
    uint32_t sampleRate = (uint32_t)aFrame.sampleRate;
    uint32_t len = (uint32_t)aFrame.data.length;
    void *data = (void *)aFrame.data.bytes;
    CMTime pts = CMTimeMakeWithSeconds(aFrame.timestamp / 1000.0, sampleRate);

    // Describe the incoming PCM layout.
    AudioStreamBasicDescription asbd;
    asbd.mSampleRate = sampleRate;
    asbd.mFormatID = kAudioFormatLinearPCM;
    asbd.mFormatFlags = kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsSignedInteger;
    asbd.mFramesPerPacket = 1;
    asbd.mChannelsPerFrame = channels;
    asbd.mBitsPerChannel = 16;
    asbd.mReserved = 0;
    asbd.mBytesPerFrame = asbd.mBitsPerChannel / 8 * asbd.mChannelsPerFrame;
    asbd.mBytesPerPacket = asbd.mBytesPerFrame * asbd.mFramesPerPacket;

    AudioBufferList audioBufferList;
    audioBufferList.mNumberBuffers = 1;
    audioBufferList.mBuffers[0].mNumberChannels= channels;
    audioBufferList.mBuffers[0].mDataByteSize= len;
    audioBufferList.mBuffers[0].mData = data;

    // Cache the format description, but rebuild it whenever the sample rate
    // or channel count changes. BUGFIX: the old code created it exactly once,
    // so after a QoS format change every new sample buffer still carried the
    // stale format description.
    // NOTE(review): the static cache is shared across recorder instances;
    // safe while all instances append on their own serial queues — confirm.
    static CMFormatDescriptionRef format = NULL;
    static uint32_t cachedSampleRate = 0;
    static int cachedChannels = 0;
    OSStatus error = 0;
    if (format == NULL || cachedSampleRate != sampleRate || cachedChannels != channels) {
        if (format != NULL) {
            CFRelease(format);
            format = NULL;
        }
        error = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &asbd, 0, NULL, 0, NULL, NULL, &format);
        if (error) {
            NSLog(@"CMAudioFormatDescriptionCreate returned error: %ld", (long)error);
            return;
        }
        cachedSampleRate = sampleRate;
        cachedChannels = channels;
    }

    CMSampleBufferRef buff = NULL;
    CMSampleTimingInfo timing = {CMTimeMake(1, sampleRate), pts, kCMTimeInvalid};
    // len / (2 * channels) == number of PCM frames (2 bytes per sample).
    error = CMSampleBufferCreate(kCFAllocatorDefault, NULL, false, NULL, NULL, format, len/(2*channels), 1, &timing, 0, NULL, &buff);
    if ( error ) {
        NSLog(@"CMSampleBufferCreate returned error: %ld", (long)error);
        return;
    }
    error = CMSampleBufferSetDataBufferFromAudioBufferList(buff, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
    if( error )
    {
        NSLog(@"CMSampleBufferSetDataBufferFromAudioBufferList returned error: %ld", (long)error);
        // BUGFIX: previously fell through and appended a buffer that had no
        // data attached; release it and drop the frame instead.
        CFRelease(buff);
        return;
    }

    if (self.writer.status == AVAssetWriterStatusWriting && self.audioInput.readyForMoreMediaData && self.status == Mp4RecorderStatusRecording) {
        if (![self.audioInput appendSampleBuffer:buff]) {
            // Appending failed: stop recording and report an encode error.
            self.status = Mp4RecorderStatusStoped;
            [self sendEvent:TRTCRemoteRecordAudioEncodeError detail:@"音频编码失败"];
        }
    }
    CFRelease(buff);
}

// Creates the H.264 video writer input (and its pixel-buffer adaptor) for the
// given dimensions and attaches it to the writer.
// Returns NO when the input cannot be added to the writer.
- (BOOL)addVideoInput:(int32_t)width height:(int32_t)height
{
    // Optional source-format hint; NULL is a valid hint, so a creation
    // failure here is not fatal.
    CMFormatDescriptionRef videoFormat = NULL;
    CMVideoFormatDescriptionCreate(kCFAllocatorDefault,
                                   kCMVideoCodecType_H264,
                                   width,
                                   height,
                                   NULL,
                                   &videoFormat);

    NSDictionary *compressProperties = @{
        AVVideoAverageBitRateKey : @(self.config.videoBitrate * 1000),
        AVVideoExpectedSourceFrameRateKey : @(self.config.videoFps),
        AVVideoMaxKeyFrameIntervalKey : @(self.config.videoGop),
//        AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel
    };

    NSDictionary *videoSettings = @{
        AVVideoCodecKey: AVVideoCodecTypeH264,
        AVVideoWidthKey: @(width),
        AVVideoHeightKey: @(height),
        AVVideoCompressionPropertiesKey: compressProperties
    };

    self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                     outputSettings:videoSettings
                                                   sourceFormatHint:videoFormat];
    // Encode in real time (frames arrive live from the SDK callback).
    self.videoInput.expectsMediaDataInRealTime = YES;
    // MARK: -- performsMultiPassEncodingIfSupported is deliberately left OFF
    // for video: enabling it made screen recording fail (possible system bug).
//    self.videoInput.performsMultiPassEncodingIfSupported = YES;

    NSDictionary *attr = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    self.videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput sourcePixelBufferAttributes:attr];

    // BUGFIX: guard the release — CFRelease(NULL) crashes, and the format
    // description creation above can legitimately fail.
    if (videoFormat != NULL) {
        CFRelease(videoFormat);
    }

    if (![self.writer canAddInput:self.videoInput]) {
        return NO;
    }
    [self.writer addInput:self.videoInput];
    return YES;
}

// Creates the AAC audio writer input for the given sample rate / channel
// count and attaches it to the writer. Returns NO when it cannot be added.
- (BOOL)addAudioInput:(int)sampleRate channels:(int)channels
{
    NSDictionary *audioSettings = @{
        AVFormatIDKey : @(kAudioFormatMPEG4AAC),
        AVSampleRateKey : @(sampleRate),
        AVNumberOfChannelsKey : @(channels),
    };

    AVAssetWriterInput *input = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    input.expectsMediaDataInRealTime = YES;
    // MARK: -- Do NOT disable this flag: without it the recorded file fails
    // to play back.
    input.performsMultiPassEncodingIfSupported = YES;
    self.audioInput = input;

    if (![self.writer canAddInput:self.audioInput]) {
        return NO;
    }
    [self.writer addInput:self.audioInput];
    return YES;
}

// Forwards a recorder event to the delegate, if it implements the callback.
// (respondsToSelector: on nil returns NO, so no separate nil check is needed.)
- (void)sendEvent:(TRTCRemoteRecordEvent)event detail:(NSString *)detail {
    id<TRTCRemoteRecorderDelegate> delegate = self.delegate;
    if ([delegate respondsToSelector:@selector(onRemoteRecordEvent:detail:)]) {
        [delegate onRemoteRecordEvent:event detail:detail];
    }
}

@end

@implementation TRTCRecordConfig

// Defaults: 64 kbps / 2-channel / 48 kHz audio; 2 Mbps / 20 fps video with a
// GOP of 15.
- (instancetype)init {
    self = [super init];
    if (self) {
        // Audio defaults.
        self.audioSample = 48000;
        self.audioChannel = 2;
        self.audioBitrate = 64;
        // Video defaults.
        self.videoFps = 20;
        self.videoGop = 15;
        self.videoBitrate = 2000;
    }
    return self;
}

@end
